/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
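/* Illustrative example (added commentary, not from the GCC sources): a
   function body containing

       try { f (); } catch (...) { g (); }

   reaches this point as a TRY_BLOCK whose TRY_HANDLERS hold the HANDLER;
   the function above rewrites it to roughly

       TRY_CATCH_EXPR <f ();, CATCH_EXPR <NULL, g ();>>

   with the HANDLER itself lowered by genericize_catch_block below.  */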
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
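/* Illustrative example (added commentary): for a function using a dynamic
   exception specification,

       void f () throw (E) { g (); }

   the EH_SPEC_BLOCK around the body is lowered to roughly

       TRY_CATCH_EXPR <g ();,
		       EH_FILTER_EXPR <E, call_unexpected (<exc ptr>)>>

   so that raising anything not matching E invokes the failure call.  */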
/* Return the first non-compound statement in STMT.  */

static tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
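/* Illustrative example (added commentary): for

       if constexpr (sizeof (long) == 8) f (); else g ();

   IF_STMT_CONSTEXPR_P is set and the condition has already been folded to
   a constant, so the IF_STMT is replaced outright by the selected branch
   (here "f ();"); no COND_EXPR is built and the dead branch is dropped.  */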
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
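/* Illustrative example (added commentary): with

       struct empty {};
       empty a, b;
       a = b;

   the assignment copies no bytes; simple_empty_class_p recognizes the RHS
   so that cp_gimplify_expr below can reduce the MODIFY_EXPR to evaluating
   its operands for side-effects only.  */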
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
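/* Illustrative example (added commentary): for the lvalue "a[i++]" the
   ARRAY_REF index has TREE_SIDE_EFFECTS, so the lvalue itself has
   side-effects; for "*p" or a plain volatile variable "v", merely naming
   the location does not, even though TREE_SIDE_EFFECTS is set on v.  */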
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	cp_fold_data pset (/*genericize*/true);
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
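/* Illustrative example of the P0145 handling above (added commentary,
   hypothetical user code): given

       int &lhs (); int rhs ();
       lhs () = rhs ();

   C++17 requires the RHS to be evaluated before the LHS.  Because the LHS
   lvalue has side-effects and the RHS is a call, the MODIFY_EXPR case above
   preevaluates the RHS into a temporary before the LHS is gimplified.  */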
/* Return true if T is a parameter or result passed by invisible
   reference.  */

bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
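/* Illustrative example (added commentary): for a non-trivially-copyable
   class passed by value,

       struct S { S (const S &); ~S (); int i; };
       int f (S s) { return s.i; }

   the ABI passes "s" by invisible reference, so the PARM_DECL has
   DECL_BY_REFERENCE set; is_invisiref_parm is what the genericization
   code below uses to rewrite its uses as dereferences.  */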
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    {
      tree init = expand_vec_init_expr (to, from, tf_warning_or_error);

      /* Make cp_gimplify_init_expr call replace_decl.  */
      *replace = fold_convert (void_type_node, init);
    }
  else if (flag_exceptions
	   && TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      *replace = split_nonconstant_init (to, from);
    }
}
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.  */

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case CALL_EXPR:
      if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
	if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
	    && source_location_current_p (fndecl))
	  *stmt_p = stmt = cxx_constant_value (stmt);
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      break;

      /* These are only for genericize time; they're here rather than in
	 cp_genericize to avoid problems with the invisible reference
	 transition.  */
    case INIT_EXPR:
      if (data->genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
	cp_genericize_target_expr (stmt_p);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
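/* Illustrative example (added commentary): for two ints "a <=> b" yields a
   std::strong_ordering; the overload called above (defined elsewhere in
   the front end) expands it to roughly nested COND_EXPRs that compare a
   and b and select the less/equal/greater constant representation
   values.  */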
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
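/* Illustrative example (added commentary, cf. c++/88256): in

       void f (void *b, int n) { auto p = (int (*)[n]) b; }

   the cast target is a pointer to the anonymous VLA type int[n];
   predeclare_vla prepends a DECL_EXPR for an artificial TYPE_DECL naming
   that type so gimplify_type_sizes computes its size before the NOP_EXPR
   is processed.  */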
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
	if (alias != error_mark_node)
	  {
	    *stmt_p = alias;
	    TREE_USED (alias) |= TREE_USED (stmt);
	  }
	*walk_subtrees = 0;
	return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (UNLIKELY (wtd->omp_ctx != NULL)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      *walk_subtrees = 0;
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost inclosing GIMPLE_BIND that has a non NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_CONTEXT (using_directive) = current_function_decl;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				     cp_genericize_r, cp_walk_subtrees);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (warning_suppressed_p (stmt /* What warning? */))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the only
		       case we need to solve is distribute parallel for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);

			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
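/* Illustrative example (added commentary): in

       int f (int i) { if (i) return 1; }

   control can flow off the end of a non-void function.  With
   -fsanitize=return a diagnostic call is appended at the fall-off point;
   otherwise, when optimizing, a call to __builtin_unreachable () is
   appended instead.  */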
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
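/* Illustrative example (added commentary): for a clause applying a copy
   constructor elementwise to

       S arr[4];

   the array branch above emits roughly

       p1 = &arr[0]; end1 = p1 + sizeof (arr);
     again:
       FN (p1, ...); p1 = p1 + sizeof (S);
       if (p1 != end1) goto again;

   i.e. a pointer-walk loop applying FN to each element in turn.  */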
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
	  || is_invisiref_parm (decl));
}
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
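/* Illustrative example (added commentary): given

       const int limit = 10;
       struct M { mutable int c; };
       const M m;

   the predicate is true for "limit" (const, no mutable members) but false
   for "m", whose mutable member makes sharing it by default unsafe.  */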
/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
   of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;

      if (c_omp_predefined_variable (decl))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* this may not be specified in data-sharing clauses, still we need
     to predetermined it firstprivate.  */
  if (decl == current_class_ptr)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values, those
     are usually result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Similarly for typeinfo symbols.  */
  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
enum omp_clause_defaultmap_kind
cxx_omp_predetermined_mapping (tree decl)
{
  /* Predetermine artificial variables holding integral values; these are
     usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;

  if (c_omp_predefined_variable (decl))
    return OMP_CLAUSE_DEFAULTMAP_TO;

  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
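/* Illustrative example (editorial addition, not original source): when an
   implicitly determined firstprivate clause names an object of class type
   whose copy constructor cannot be called, e.g.

     struct NC { NC (); NC (const NC &) = delete; };

   building the clause info fails and the clause is downgraded to shared
   above rather than diagnosed here.  */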
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
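/* Illustrative example (editorial addition, not original source): given

     const int n = 4;
     int a = n + 2;

   folding the rvalue use of N through decl_constant_value yields 4, and
   the loop above re-folds until no further replacement happens, so the
   initializer folds to the constant 6.  */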
/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}
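/* Illustrative example (editorial addition, not original source): for

     constexpr int sq (int i) { return i * i; }
     int b = sq (3);

   maybe_constant_value evaluates sq (3) to 9 under -std=c++11 or later;
   the TARGET_EXPR/VIEW_CONVERT_EXPR cleanups above matter when the
   constant result is a CONSTRUCTOR rather than a scalar.  */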
/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  x = cp_fully_fold (x);
  cp_fold_data data (/*genericize*/false);
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}
static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  uid_sensitive_constexpr_evaluation_checker c;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;
    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    CASE_CONVERT:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;
    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  if (p != x)
	    return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;
    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      /* decltype(nullptr) has only one value, so optimize away all comparisons
	 with that type right away; keeping them in the IL causes trouble for
	 various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case GE_EXPR:
	      x = constant_boolean_node (true, TREE_TYPE (x));
	      break;
	    case NE_EXPR:
	    case LT_EXPR:
	    case GT_EXPR:
	      x = constant_boolean_node (false, TREE_TYPE (x));
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
					op0, op1);
	}
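      /* Illustrative example (editorial addition, not original source): in

	   decltype (nullptr) p = nullptr, q = nullptr;
	   bool b = p == q;

	 the comparison folds to true at once; omit_two_operands_loc
	 preserves any side effects of the operands while discarding
	 their values.  */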
      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);
      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (warn_nonnull_compare
		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
		suppress_warning (x, OPT_Wnonnull_compare);
	    }
	  else
	    x = org_x;
	}

      break;
    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;
    case CALL_EXPR:
      {
	tree callee = get_callee_fndecl (x);

	/* "Inline" calls to std::move/forward and other cast-like functions
	   by simply folding them into a corresponding cast to their return
	   type.  This is cheaper than relying on the middle end to do so, and
	   also means we avoid generating useless debug info for them at all.

	   At this point the argument has already been converted into a
	   reference, so it suffices to use a NOP_EXPR to express the
	   cast.  */
	if ((OPTION_SET_P (flag_fold_simple_inlines)
	     ? flag_fold_simple_inlines
	     : !flag_no_inline)
	    && call_expr_nargs (x) == 1
	    && decl_in_std_namespace_p (callee)
	    && DECL_NAME (callee) != NULL_TREE
	    && (id_equal (DECL_NAME (callee), "move")
		|| id_equal (DECL_NAME (callee), "forward")
		|| id_equal (DECL_NAME (callee), "addressof")
		/* This addressof equivalent is used heavily in libstdc++.  */
		|| id_equal (DECL_NAME (callee), "__addressof")
		|| id_equal (DECL_NAME (callee), "as_const")))
	  {
	    r = CALL_EXPR_ARG (x, 0);
	    /* Check that the return and argument types are sane before
	       folding.  */
	    if (INDIRECT_TYPE_P (TREE_TYPE (x))
		&& INDIRECT_TYPE_P (TREE_TYPE (r)))
	      {
		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
		  r = build_nop (TREE_TYPE (x), r);
		x = cp_fold (r);
		break;
	      }
	  }
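	/* Illustrative example (editorial addition, not original source):
	   with the folding above,

	     T &&r = std::move (t);

	   loses its CALL_EXPR and behaves as if written

	     T &&r = static_cast<T &&> (t);

	   i.e. the call becomes a plain NOP_EXPR cast, and no debug info
	   is emitted for the std::move call itself.  */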
	int sv = optimize, nw = sv;

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;
	if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
	  {
	    switch (DECL_FE_FUNCTION_CODE (callee))
	      {
		/* Defer folding __builtin_is_constant_evaluated.  */
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		x = fold_builtin_source_location (EXPR_LOCATION (x));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		x = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (x), call_expr_nargs (x),
			 &CALL_EXPR_ARG (x, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		x = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (x), call_expr_nargs (x),
			 &CALL_EXPR_ARG (x, 0));
		break;
	      default:
		break;
	      }
	    break;
	  }

	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
				  BUILT_IN_FRONTEND))
	  {
	    x = fold_builtin_source_location (EXPR_LOCATION (x));
	    break;
	  }
	bool changed = false;
	int m = call_expr_nargs (x);
	for (int i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		if (!changed)
		  x = copy_node (x);
		CALL_EXPR_ARG (x, i) = r;
		changed = true;
	      }
	  }
	if (x == error_mark_node)
	  break;
	optimize = nw;
	r = fold_call_expr (EXPR_LOCATION (x), x, false);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	break;
      }
    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	int n = TREE_VEC_LENGTH (x);

	for (int i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    if (op != TREE_VEC_ELT (x, i))
	      {
		if (!changed)
		  x = copy_node (x);
		TREE_VEC_ELT (x, i) = op;
		changed = true;
	      }
	  }
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;
    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    case REQUIRES_EXPR:
      x = evaluate_requires_expr (x);
      break;

    default:
      return org_x;
    }
  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      copy_warning (x, org_x);
    }

  if (!c.evaluation_restricted_p ())
    {
      fold_cache->put (org_x, x);
      /* Prevent that we try to fold an already folded result again.  */
      if (x != org_x)
	fold_cache->put (x, x);
    }

  return x;
}
/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */

static tree
lookup_hotness_attribute (tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree name = get_attribute_name (list);
      if (is_attribute_p ("hot", name)
	  || is_attribute_p ("cold", name)
	  || is_attribute_p ("likely", name)
	  || is_attribute_p ("unlikely", name))
	break;
    }
  return list;
}
/* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST.  */

static tree
remove_hotness_attribute (tree list)
{
  list = remove_attribute ("hot", list);
  list = remove_attribute ("cold", list);
  list = remove_attribute ("likely", list);
  list = remove_attribute ("unlikely", list);
  return list;
}
/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
   PREDICT_EXPR.  */

tree
process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  if (tree attr = lookup_hotness_attribute (std_attrs))
    {
      tree name = get_attribute_name (attr);
      bool hot = (is_attribute_p ("hot", name)
		  || is_attribute_p ("likely", name));
      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
				      hot ? TAKEN : NOT_TAKEN);
      SET_EXPR_LOCATION (pred, attrs_loc);
      add_stmt (pred);
      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
		 get_attribute_name (other), name);
      std_attrs = remove_hotness_attribute (std_attrs);
    }
  return std_attrs;
}
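/* Illustrative example (editorial addition, not original source): in

     if (x)
       [[likely]] f ();

   the [[likely]] attribute is consumed here: a PREDICT_EXPR with
   PRED_HOT_LABEL/TAKEN is emitted via add_stmt, and the attribute is
   stripped from the returned attribute list.  */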
/* Helper of fold_builtin_source_location, return the
   std::source_location::__impl type after performing verification
   on it.  LOC is used for reporting any errors.  */

static tree
get_source_location_impl_type (location_t loc)
{
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error_at (loc, "%qD is not a class type", decl);
      return error_mark_node;
    }

  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error_at (loc, "%qD does not have %<const char *%> type",
			    field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error_at (loc, "%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error_at (loc, "%<std::source_location::__impl%> does not contain only "
		     "non-static data members %<_M_file_name%>, "
		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
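/* Illustrative note (editorial addition, not original source): the layout
   verified above corresponds to libstdc++'s

     struct source_location::__impl
     {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line;
       unsigned _M_column;
     };

   any other set of non-static data members is rejected.  */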
/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;
  unsigned uid;
  tree var;
};
/* Traits class for function start hash maps below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }

  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
			   void *);
    gt_pch_nx (&p, op, cookie);
  }
};
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
static GTY(()) unsigned int source_location_id;
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.  */

tree
fold_builtin_source_location (location_t loc)
{
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_aggregate_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 2);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      entryp->loc = loc;
      entryp->uid = entry.uid;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}
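/* Illustrative example (editorial addition, not original source): a call to
   std::source_location::current () lowers to __builtin_source_location (),
   which the function above folds to the address of a static __impl object,
   conceptually

     static const source_location::__impl Lsrc_loc0   // name is illustrative
       = { "file.cc", "int f()", 42, 10 };

   with one object shared by repeated uses at the same location in the same
   function, via source_location_table.  */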
#include "gt-cp-cp-gimplify.h"