/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
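
/* For illustration: 'try { f (); } catch (...) { g (); }' arrives here as
   TRY_BLOCK <f (), HANDLER <g ()>> and leaves as the GENERIC form
   TRY_CATCH_EXPR <f (), HANDLER <g ()>>; each HANDLER itself is
   genericized separately by genericize_catch_block below.  */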
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
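
/* The resulting tree shape, for reference:

     TRY_CATCH_EXPR
       op0: BODY
       op1: EH_FILTER_EXPR <ALLOWED, EH_FILTER_FAILURE: FAILURE>

   i.e. if BODY throws something not matching the ALLOWED type list,
   FAILURE runs.  */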
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
/* Return the first non-compound statement in STMT.  */

static tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
        stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
                       void_node, else_);
      else
        stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
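
/* Illustrative lowerings: 'if (c) f (); else g ();' becomes
   COND_EXPR <c, f (), g ()>; 'if constexpr (B)' with constant B drops
   straight to the selected branch; and 'if consteval' discards the then
   branch, keeping the else branch (wrapped in a never-taken COND_EXPR
   when the discarded branch could fall through).  */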
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
  *expr_p = NULL_TREE;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !warning_suppressed_p (stmt, OPT_Wunused_value))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
        /* Make sure that we expected to elide this temporary.  But also allow
           gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
        gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
                             || !TREE_ADDRESSABLE (TREE_TYPE (from)));
        if (target_expr_needs_replace (from))
          {
            /* If this was changed by cp_genericize_target_expr, we need to
               walk into it to replace uses of the slot.  */
            replace_decl (&init, TARGET_EXPR_SLOT (from), to);
            *expr_p = init;
            return;
          }
        else
          from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
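
/* MUST_NOT_THROW_EXPR guards regions such as a noexcept function body or a
   cleanup run during unwinding; the GIMPLE_TRY_CATCH built above uses
   GIMPLE_EH_MUST_NOT_THROW as its handler, so anything escaping BODY
   reaches terminate_fn (std::terminate).  */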
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
          || lambda_static_thunk_p (fn))
        /* In a thunk, we pass through invisible reference parms, so this isn't
           actually a copy.  */
        return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
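
/* For example, evaluating 'a[f ()]' as an lvalue has side-effects (the
   ARRAY_REF index call runs), while merely naming a volatile variable does
   not: TREE_SIDE_EFFECTS is set on it, yet nothing happens until it is
   actually read or written.  */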
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
                 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
         that can mean we don't copy the argument and some following
         argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
        return GS_ERROR;
      else if (ordered
               && is_gimple_reg_type (TREE_TYPE (*arg_p))
               && is_gimple_variable (*arg_p)
               && TREE_CODE (*arg_p) != SSA_NAME
               /* No need to force references into register, references
                  can't be modified.  */
               && !TYPE_REF_P (TREE_TYPE (*arg_p))
               /* And this can't be modified either.  */
               && *arg_p != current_class_ptr)
        *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
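
/* Sketch: in a hypothetical call 'f (i, i++)' whose arguments must be
   evaluated left to right, the earlier argument 'i' has to be copied out
   into a temporary before the later side-effecting argument runs; passing
   true for ORDERED is what requests that copy.  */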
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
                                        tf_warning_or_error);

        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        copy_if_shared (expr_p);
        ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;
    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            while (TREE_CODE (op1) == TARGET_EXPR)
              /* We're disconnecting the initializer from its target,
                 don't create a temporary.  */
              op1 = TARGET_EXPR_INITIAL (op1);

            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
            if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
              /* Avoid 'return *<retval>;'  */
              *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;
    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;
    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
                               TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          int last_side_effects_arg = -1;
          for (int i = nargs; i > 0; --i)
            if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
              {
                last_side_effects_arg = i;
                break;
              }
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
                                   i < last_side_effects_arg);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              int nargs = call_expr_nargs (*expr_p);
              bool side_effects = false;
              for (int i = 1; i < nargs; ++i)
                if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
                  {
                    side_effects = true;
                    break;
                  }
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
                                   side_effects);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
            switch (DECL_FE_FUNCTION_CODE (decl))
              {
              case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
                *expr_p = boolean_false_node;
                break;
              case CP_BUILT_IN_SOURCE_LOCATION:
                *expr_p
                  = fold_builtin_source_location (*expr_p);
                break;
              case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
                *expr_p
                  = fold_builtin_is_corresponding_member
                        (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                         &CALL_EXPR_ARG (*expr_p, 0));
                break;
              case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
                *expr_p
                  = fold_builtin_is_pointer_inverconvertible_with_class
                        (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                         &CALL_EXPR_ARG (*expr_p, 0));
                break;
              default:
                break;
              }
        }
      break;
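
/* Note the CP_BUILT_IN_IS_CONSTANT_EVALUATED case above: a call to
   std::is_constant_evaluated () that survives all the way to
   gimplification can no longer be manifestly constant-evaluated, so it
   folds to false here.  */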
    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
         elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
         gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
         on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
                           || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
        ret = GS_ERROR;
      else
        ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
/* Return true if T is a PARM_DECL or RESULT_DECL passed by invisible
   reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
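
/* For example, a local 'S s;' of class type used inside '#pragma omp task'
   is implicitly firstprivate there; the get_copy_ctor/get_dtor calls above
   instantiate S's copy constructor and destructor now, because by the time
   gimplification runs it would already be too late to do so.  */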
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
          && !TARGET_EXPR_ELIDING_P (e.value))
        return true;
    }
  return false;
}
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   replacement code.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (TREE_CODE (from) == CONSTRUCTOR
           && TREE_SIDE_EFFECTS (from)
           && ((flag_exceptions
                && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
               || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
        /* Make cp_gimplify_init_expr call replace_decl on this
           TARGET_EXPR_INITIAL.  */
        init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
                      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}
/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

static void
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.  */

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};
static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data *) data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
          && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
        {
          if (!data->pset.add (stmt))
            error_at (PTRMEM_CST_LOCATION (stmt),
                      "taking address of an immediate function %qD",
                      PTRMEM_CST_MEMBER (stmt));
          stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
          break;
        }
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
          && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
        {
          error_at (EXPR_LOCATION (stmt),
                    "taking address of an immediate function %qD",
                    TREE_OPERAND (stmt, 0));
          stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
          break;
        }
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
        {
          /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
             boolean_false_node.  */
          cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
          cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          return NULL_TREE;
        }
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
         here rather than in cp_genericize to avoid problems with the invisible
         reference transition.  */
    case INIT_EXPR:
      if (data->genericize)
        cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
        cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
         that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
        {
          cp_walk_tree (&init, cp_fold_r, data, NULL);
          cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          if (TREE_CODE (init) == TARGET_EXPR)
            {
              tree sub = TARGET_EXPR_INITIAL (init);
              maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
                                  TARGET_EXPR_SLOT (stmt));
              init = sub;
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
        return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
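
/* For the c++/88256 situation, the problem case is a cast such as

     (int (*)[n]) ptr

   where the pointed-to VLA type has no TYPE_DECL of its own; the DECL_EXPR
   built above gives gimplify_type_sizes something to hang the size
   computation on.  */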
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }
  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
        if (alias != error_mark_node)
          {
            *stmt_p = alias;
            TREE_USED (alias) |= TREE_USED (stmt);
          }
        *walk_subtrees = 0;
        return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (UNLIKELY (wtd->omp_ctx != NULL)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;
    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;
      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    case COND_EXPR:
      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
        break;
      }
    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      *walk_subtrees = 0;
      break;
    case ASSERTION_STMT:
    case PRECONDITION_STMT:
    case POSTCONDITION_STMT:
      {
        if (tree check = build_contract_check (stmt))
          {
            *stmt_p = check;
            return cp_genericize_r (stmt_p, walk_subtrees, data);
          }

        /* If we didn't build a check, replace it with void_node so we don't
           leak contracts into GENERIC.  */
        *stmt_p = void_node;
        break;
      }
    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;
                DECL_CONTEXT (using_directive) = current_function_decl;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;
    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          c_genericize_control_stmt (stmt_p, walk_subtrees, data,
                                     cp_genericize_r, cp_walk_subtrees);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;
    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;
    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (warning_suppressed_p (stmt /* What warning?  */))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "%<throw%> will always call %<terminate%>")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to %<noexcept%>");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this %<throw%> will call %<terminate%> "
                          "because destructors default to %<noexcept%>");
          }
      }
      break;
    case CONVERT_EXPR:
      gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;
    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;
    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
         normal functions.  */
      if (concept_check_p (stmt))
        {
          *stmt_p = evaluate_concept_check (stmt);
          *walk_subtrees = 0;
          break;
        }

      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;
    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;
    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
         composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
        {
          tree *data[4] = { NULL, NULL, NULL, NULL };
          tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
                                  find_combined_omp_for, data, NULL);
          if (inner != NULL_TREE
              && TREE_CODE (inner) == OMP_FOR)
            {
              for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
                if (OMP_FOR_ORIG_DECLS (inner)
                    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
                                                i)) == TREE_LIST
                    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
                                                   i)))
                  {
                    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
                    /* Class iterators aren't allowed on OMP_SIMD, so the only
                       case we need to solve is distribute parallel for.  */
                    gcc_assert (TREE_CODE (inner) == OMP_FOR
                                && data[1]);
                    tree orig_decl = TREE_PURPOSE (orig);
                    tree c, cl = NULL_TREE;
                    for (c = OMP_FOR_CLAUSES (inner);
                         c; c = OMP_CLAUSE_CHAIN (c))
                      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                           || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
                          && OMP_CLAUSE_DECL (c) == orig_decl)
                        {
                          cl = c;
                          break;
                        }
                    if (cl == NULL_TREE)
                      {
                        for (c = OMP_PARALLEL_CLAUSES (*data[1]);
                             c; c = OMP_CLAUSE_CHAIN (c))
                          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                              && OMP_CLAUSE_DECL (c) == orig_decl)
                            {
                              cl = c;
                              break;
                            }
                      }
                    if (cl)
                      {
                        orig_decl = require_complete_type (orig_decl);
                        tree inner_type = TREE_TYPE (orig_decl);
                        if (orig_decl == error_mark_node)
                          continue;
                        if (TYPE_REF_P (TREE_TYPE (orig_decl)))
                          inner_type = TREE_TYPE (inner_type);
                        while (TREE_CODE (inner_type) == ARRAY_TYPE)
                          inner_type = TREE_TYPE (inner_type);
                        get_copy_ctor (inner_type, tf_warning_or_error);
                      }
                  }
            }
        }
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
                                 cp_genericize_r, cp_walk_subtrees);
      break;
    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
                            TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
         improve any optimizations in that case, just break UB code.
         Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
         UBSan covers this with ubsan_instrument_return above where sufficient
         information is provided, while the __builtin_unreachable () below
         if return sanitization is disabled will just result in hard to
         understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            while (!tsi_end_p (i))
              {
                tree p = tsi_stmt (i);
                if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
                  break;
                tsi_prev (&i);
              }
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
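
/* E.g. for 'int f (bool b) { if (b) return 1; }' the search above finds no
   trailing RETURN_EXPR, so the fall-off-the-end path gets either a UBSan
   missing-return check or a __builtin_unreachable, depending on flags.  */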
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
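
/* For arrays, the loop built above is morally

     p1 = &arg1[0]...[0]; end1 = p1 + sizeof (arg1);
     lab: fn (p1, p2, <default args>); p1 += elt_size; p2 += elt_size;
     if (p1 != end1) goto lab;

   applying FN elementwise; for non-arrays it is a single call.  */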
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
          || is_invisiref_parm (decl));
}
/* Return true if DECL is a const-qualified variable having no mutable
   member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
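/* For illustration (editor's sketch): given

     const int a = 1;                  // no mutable members
     struct S { mutable int m; };
     const S s;

   'a' satisfies this predicate while 's' does not, because of the mutable
   member found by cp_has_mutable_p.  */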
/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
   of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;

      if (c_omp_predefined_variable (decl))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* "this" may not be specified in data-sharing clauses, still we need
     to predetermine it firstprivate.  */
  if (decl == current_class_ptr)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Similarly for typeinfo symbols.  */
  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
enum omp_clause_defaultmap_kind
cxx_omp_predetermined_mapping (tree decl)
{
  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;

  if (c_omp_predefined_variable (decl))
    return OMP_CLAUSE_DEFAULTMAP_TO;

  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
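/* Editor's note (interpretation): when the decl's type is incomplete, or the
   base element is a class whose needed special member functions cannot be
   built, the implicit clause is downgraded to shared above instead of
   privatizing the variable.  */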
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}
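/* For illustration (editor's sketch): folding 'n' as an rvalue in

     const int n = 4;
     int a[n];

   goes through decl_constant_value above and yields the constant 4.  */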
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}
/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  x = cp_fully_fold (x);
  cp_fold_data data (/*genericize*/false);
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  uid_sensitive_constexpr_evaluation_checker c;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;
    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    CASE_CONVERT:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;
    case EXCESS_PRECISION_EXPR:
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
      break;
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  if (p != x)
	    return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;
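      /* For illustration (editor's sketch): the "user offsetof" handled
	 above is an expression like

	   &((struct S *) 0)->field

	 which folds to a constant offset via fold_offsetof.  */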
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;
    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      /* decltype(nullptr) has only one value, so optimize away all comparisons
	 with that type right away, keeping them in the IL causes troubles for
	 various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case GE_EXPR:
	      x = constant_boolean_node (true, TREE_TYPE (x));
	      break;
	    case NE_EXPR:
	    case LT_EXPR:
	    case GT_EXPR:
	      x = constant_boolean_node (false, TREE_TYPE (x));
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
					op0, op1);
	}

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  else
	    x = org_x;
	}

      break;
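      /* For illustration (editor's sketch): both operands of

	   decltype(nullptr) a, b;  ...  a == b

	 have NULLPTR_TYPE, so the comparison folds to true (NE/LT/GT fold
	 to false) and only the operands' side effects are retained.  */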
    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;
    case CALL_EXPR:
      {
	tree callee = get_callee_fndecl (x);

	/* "Inline" calls to std::move/forward and other cast-like functions
	   by simply folding them into a corresponding cast to their return
	   type.  This is cheaper than relying on the middle end to do so, and
	   also means we avoid generating useless debug info for them at all.

	   At this point the argument has already been converted into a
	   reference, so it suffices to use a NOP_EXPR to express the
	   cast.  */
	if ((OPTION_SET_P (flag_fold_simple_inlines)
	     ? flag_fold_simple_inlines
	     : !flag_no_inline)
	    && call_expr_nargs (x) == 1
	    && decl_in_std_namespace_p (callee)
	    && DECL_NAME (callee) != NULL_TREE
	    && (id_equal (DECL_NAME (callee), "move")
		|| id_equal (DECL_NAME (callee), "forward")
		|| id_equal (DECL_NAME (callee), "addressof")
		/* This addressof equivalent is used heavily in libstdc++.  */
		|| id_equal (DECL_NAME (callee), "__addressof")
		|| id_equal (DECL_NAME (callee), "as_const")))
	  {
	    r = CALL_EXPR_ARG (x, 0);
	    /* Check that the return and argument types are sane before
	       folding.  */
	    if (INDIRECT_TYPE_P (TREE_TYPE (x))
		&& INDIRECT_TYPE_P (TREE_TYPE (r)))
	      {
		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
		  r = build_nop (TREE_TYPE (x), r);
		x = cp_fold (r);
		break;
	      }
	  }

	int sv = optimize, nw = sv;

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
	  {
	    switch (DECL_FE_FUNCTION_CODE (callee))
	      {
		/* Defer folding __builtin_is_constant_evaluated.  */
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		x = fold_builtin_source_location (x);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		x = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (x), call_expr_nargs (x),
			 &CALL_EXPR_ARG (x, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		x = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (x), call_expr_nargs (x),
			 &CALL_EXPR_ARG (x, 0));
		break;
	      default:
		break;
	      }
	    break;
	  }

	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
				  BUILT_IN_FRONTEND))
	  {
	    x = fold_builtin_source_location (x);
	    break;
	  }

	bool changed = false;
	int m = call_expr_nargs (x);
	for (int i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		if (!changed)
		  x = copy_node (x);
		CALL_EXPR_ARG (x, i) = r;
		changed = true;
	      }
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = cp_build_init_expr (s, r);
	      }
	    x = r;
	    break;
	  }

	break;
      }
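      /* For illustration (editor's sketch): with the simple-inlines folding
	 above, a call like std::move (x) is replaced by a cast of the form
	 ((T &&) x), so no CALL_EXPR and no debug info for it survive.  */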
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	int n = TREE_VEC_LENGTH (x);

	for (int i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    if (op != TREE_VEC_ELT (x, i))
	      {
		if (!changed)
		  x = copy_node (x);
		TREE_VEC_ELT (x, i) = op;
		changed = true;
	      }
	  }
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    case REQUIRES_EXPR:
      x = evaluate_requires_expr (x);
      break;

    default:
      return org_x;
    }

  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      copy_warning (x, org_x);
    }

  if (!c.evaluation_restricted_p ())
    {
      fold_cache->put (org_x, x);
      /* Prevent that we try to fold an already folded result again.  */
      if (x != org_x)
	fold_cache->put (x, x);
    }

  return x;
}
/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */

tree
lookup_hotness_attribute (tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree name = get_attribute_name (list);
      if ((is_attribute_p ("hot", name)
	   || is_attribute_p ("cold", name)
	   || is_attribute_p ("likely", name)
	   || is_attribute_p ("unlikely", name))
	  && is_attribute_namespace_p ("", list))
	break;
    }
  return list;
}
/* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST.  */

static tree
remove_hotness_attribute (tree list)
{
  for (tree *p = &list; *p; )
    {
      tree l = *p;
      tree name = get_attribute_name (l);
      if ((is_attribute_p ("hot", name)
	   || is_attribute_p ("cold", name)
	   || is_attribute_p ("likely", name)
	   || is_attribute_p ("unlikely", name))
	  && is_attribute_namespace_p ("", l))
	*p = TREE_CHAIN (l);
      else
	p = &TREE_CHAIN (l);
    }
  return list;
}
/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
   PREDICT_EXPR.  */

tree
process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  if (tree attr = lookup_hotness_attribute (std_attrs))
    {
      tree name = get_attribute_name (attr);
      bool hot = (is_attribute_p ("hot", name)
		  || is_attribute_p ("likely", name));
      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
				      hot ? TAKEN : NOT_TAKEN);
      SET_EXPR_LOCATION (pred, attrs_loc);
      add_stmt (pred);
      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
		 get_attribute_name (other), name);
      std_attrs = remove_hotness_attribute (std_attrs);
    }
  return std_attrs;
}
/* Build IFN_ASSUME internal call for assume condition ARG.  */

tree
build_assume_call (location_t loc, tree arg)
{
  if (!processing_template_decl)
    arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
  return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
				       1, arg);
}
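/* For illustration (editor's sketch): a statement such as

     [[assume (x > 0)]];

   lowers to an IFN_ASSUME internal call on the contextually converted
   condition, wrapped in a cleanup point outside of templates.  */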
/* If [[assume (cond)]] appears on this statement, handle it.  */

tree
process_stmt_assume_attribute (tree std_attrs, tree statement,
			       location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  tree attr = lookup_attribute ("gnu", "assume", std_attrs);
  if (attr == NULL_TREE)
    return std_attrs;
  /* The next token after the assume attribute is not ';'.  */
  if (statement)
    {
      warning_at (attrs_loc, OPT_Wattributes,
		  "%<assume%> attribute not followed by %<;%>");
      attr = NULL_TREE;
    }
  for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
    {
      tree args = TREE_VALUE (attr);
      int nargs = list_length (args);
      if (nargs != 1)
	{
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "wrong number of arguments specified for "
			       "%qE attribute", get_attribute_name (attr));
	  inform (attrs_loc, "expected %i, found %i", 1, nargs);
	}
      else
	{
	  tree arg = TREE_VALUE (args);
	  if (!type_dependent_expression_p (arg))
	    arg = contextual_conv_bool (arg, tf_warning_or_error);
	  if (error_operand_p (arg))
	    continue;
	  finish_expr_stmt (build_assume_call (attrs_loc, arg));
	}
    }
  return remove_attribute ("gnu", "assume", std_attrs);
}
/* Return the type std::source_location::__impl after performing
   verification on it.  */

tree
get_source_location_impl_type ()
{
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error ("%qD is not a class type", decl);
      return error_mark_node;
    }

  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error ("%qD does not have %<const char *%> type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error ("%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error ("%<std::source_location::__impl%> does not contain only "
	     "non-static data members %<_M_file_name%>, "
	     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
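/* Editor's note: the verification above matches libstdc++'s layout, roughly

     struct source_location::__impl {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line, _M_column;    // any integral type is accepted
     };
*/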
/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;
  unsigned uid;
  tree var;
};

/* Traits class for function start hash maps below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }

  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
			   void *);
    gt_pch_nx (&p, op, cookie);
  }
};

static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
static GTY(()) unsigned int source_location_id;
/* Fold the __builtin_source_location () call T.  */

tree
fold_builtin_source_location (const_tree t)
{
  gcc_assert (TREE_CODE (t) == CALL_EXPR);
  /* TREE_TYPE (t) is const std::source_location::__impl*  */
  tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  gcc_assert (CLASS_TYPE_P (source_location_impl)
	      && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));

  location_t loc = EXPR_LOCATION (t);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_aggregate_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (fname);
		}
	      else
		val = build_string_literal ("");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 2);

	      val = build_string_literal (name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
}
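/* For illustration (editor's sketch): each distinct expansion point gets one
   static "Lsrc_loc<N>" variable, e.g.

     static const std::source_location::__impl Lsrc_loc0
       = { "t.C", "int main()", 3, 14 };

   and the builtin call folds to its address.  */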
#include "gt-cp-cp-gimplify.h"