/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"

/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
  /* Whether we're being called from cp_fold_immediate.  */
  ff_fold_immediate = 1 << 2,
};

using fold_flags_t = int;

struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

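/* For example (an illustrative sketch, not compiler output):

     try { f (); } catch (...) { g (); }

   arrives here as the front-end tree TRY_BLOCK <f (), handlers> and
   leaves as the GENERIC tree TRY_CATCH_EXPR <f (), handlers>; the
   handlers themselves are lowered by genericize_catch_block below.  */
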
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}

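/* Roughly (illustrative pseudo-tree): for a pre-C++17 dynamic
   exception specification

     void f () throw (int) { body; }

   this produces the equivalent of

     TRY_CATCH_EXPR <body,
       EH_FILTER_EXPR <allowed = {int},
	 failure = call_unexpected (exception pointer)>>

   where EH_SPEC_RAISES supplies the allowed-type list.  */
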
/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}

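/* For instance, "if constexpr (sizeof (int) == 4) a (); else b ();"
   leaves here as just the selected arm; for "if consteval" the
   then-branch is always dropped (its immediate calls were folded
   earlier); and a plain "if (x) a (); else b ();" becomes
   COND_EXPR <x, a (), b ()> (illustrative).  */
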
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

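/* E.g. (illustrative sketch) for "T t = T (1);" the initializer
   arrives as INIT_EXPR <t, TARGET_EXPR <slot, AGGR_INIT_EXPR <ctor,
   ..., slot>>>; the temporary slot is elided and the AGGR_INIT_EXPR
   is retargeted to construct t directly.  */
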
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}

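/* So for "struct E {}; E e1, e2; ... e1 = e2;" the copy moves no
   data: gimplification keeps only the side-effects of the operands
   and drops the copy itself.  */
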
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

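/* E.g. merely naming a volatile int V sets TREE_SIDE_EFFECTS, but
   evaluating the lvalue itself does nothing, whereas the lvalue
   "a[i++]" really does have a side-effect in its subscript.  */
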
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary, and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}

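/* E.g. (illustrative) in a call whose arguments must be evaluated
   left to right, given "f (x, g ())" where g modifies x, the value
   of x is forced into a temporary before g () runs, so the later
   side-effect cannot change the already-evaluated argument.  */
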
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	cp_fold_data data (ff_genericize | ff_mce_false);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (*expr_p);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
	 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
	 on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
			   || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

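/* E.g. (illustrative) for

     S s;
     #pragma omp task
     use (s);

   s is implicitly determined firstprivate in the task, so S's copy
   constructor and destructor get instantiated here, before
   gimplification would need them.  */
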
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
	  && !TARGET_EXPR_ELIDING_P (e.value))
	return true;
    }
  return false;
}

/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && ((flag_exceptions
		&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
	       || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}

/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}

/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}

/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

static bool
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return false;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
  return true;
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* A subroutine of cp_fold_r to handle immediate functions.  */

static tree
cp_fold_immediate_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  auto data = static_cast<cp_fold_data *>(data_);
  tree stmt = *stmt_p;
  /* The purpose of this is not to emit errors for mce_unknown.  */
  const tsubst_flags_t complain = (data->flags & ff_mce_false
				   ? tf_error : tf_none);

  /* No need to look into types or unevaluated operands.
     NB: This affects cp_fold_r as well.  */
  if (TYPE_P (stmt) || unevaluated_p (TREE_CODE (stmt)))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    /* Unfortunately we must handle code like
	 false ? bar () : 42
       where we have to check bar too.  The cp_fold call in cp_fold_r could
       fold the ?: into a constant before we see it here.  */
    case COND_EXPR:
      /* If we are called from cp_fold_immediate, we don't need to worry about
	 cp_fold folding away the COND_EXPR.  */
      if (data->flags & ff_fold_immediate)
	break;
      if (TREE_OPERAND (stmt, 1)
	  && cp_walk_tree (&TREE_OPERAND (stmt, 1), cp_fold_immediate_r, data,
			   nullptr))
	return error_mark_node;
      if (TREE_OPERAND (stmt, 2)
	  && cp_walk_tree (&TREE_OPERAND (stmt, 2), cp_fold_immediate_r, data,
			   nullptr))
	return error_mark_node;
      /* We're done here.  Don't clear *walk_subtrees here though: we're called
	 from cp_fold_r and we must let it recurse on the expression with
	 cp_fold.  */
      break;
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  if (!data->pset.add (stmt) && (complain & tf_error))
	    {
	      error_at (PTRMEM_CST_LOCATION (stmt),
			"taking address of an immediate function %qD",
			PTRMEM_CST_MEMBER (stmt));
	      *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	    }
	  return error_mark_node;
	}
      break;

    /* Expand immediate invocations.  */
    case CALL_EXPR:
    case AGGR_INIT_EXPR:
      if (tree fn = cp_get_callee (stmt))
	if (TREE_CODE (fn) != ADDR_EXPR || ADDR_EXPR_DENOTES_CALL_P (fn))
	  if (tree fndecl = cp_get_fndecl_from_callee (fn, /*fold*/false))
	    if (DECL_IMMEDIATE_FUNCTION_P (fndecl))
	      {
		stmt = cxx_constant_value (stmt, complain);
		if (stmt == error_mark_node)
		  {
		    if (complain & tf_error)
		      *stmt_p = error_mark_node;
		    return error_mark_node;
		  }
		*stmt_p = stmt;
	      }
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0))
	  && !ADDR_EXPR_DENOTES_CALL_P (stmt))
	{
	  if (complain & tf_error)
	    {
	      error_at (EXPR_LOCATION (stmt),
			"taking address of an immediate function %qD",
			TREE_OPERAND (stmt, 0));
	      *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	    }
	  return error_mark_node;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper around cp_fold_immediate_r.  Return true if we found
   a non-constant immediate function, or taking the address of an
   immediate function.  */

bool
cp_fold_immediate (tree *tp, mce_value manifestly_const_eval)
{
  if (cxx_dialect <= cxx17)
    return false;

  fold_flags_t flags = ff_fold_immediate;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;

  cp_fold_data data (flags);
  return !!cp_walk_tree_without_duplicates (tp, cp_fold_immediate_r, &data);
}

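/* E.g. given "consteval int sq (int i) { return i * i; }", a call
   "sq (2)" is replaced by the constant 4 here, while "&sq" outside a
   manifestly constant-evaluated context is diagnosed as taking the
   address of an immediate function (illustrative).  */
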
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  For now most folding is done only on GENERIC in
   fold-const, so we need to perform this before the transformation
   to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  if (cxx_dialect > cxx17)
    cp_fold_immediate_r (stmt_p, walk_subtrees, data);

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
	 here rather than in cp_genericize to avoid problems with the invisible
	 reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}

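/* E.g. for int operands, "a <=> b" is expanded into the underlying
   comparisons that produce the appropriate std::strong_ordering
   value; the genericize_spaceship overload called above does the
   actual lowering (illustrative summary).  */
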
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}

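/* E.g. a cast to pointer-to-VLA such as "(int (*)[n]) ptr" (c++/88256,
   illustrative) gets a preceding DECL_EXPR for the anonymous type
   int[n], so that gimplify_type_sizes can evaluate the array bound.  */
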
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
	if (alias != error_mark_node)
	  {
	    *stmt_p = alias;
	    TREE_USED (alias) |= TREE_USED (stmt);
	  }
	*walk_subtrees = 0;
	return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (UNLIKELY (wtd->omp_ctx != NULL)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0))
	{
	  if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	    /* Don't dereference an invisiref RESULT_DECL inside a
	       RETURN_EXPR.  */
	    *walk_subtrees = 0;
	  if (RETURN_EXPR_LOCAL_ADDR_P (stmt))
	    {
	      /* Don't return the address of a local variable.  */
	      tree *p = &TREE_OPERAND (stmt, 0);
	      while (TREE_CODE (*p) == COMPOUND_EXPR)
		p = &TREE_OPERAND (*p, 0);
	      if (TREE_CODE (*p) == INIT_EXPR)
		{
		  tree op = TREE_OPERAND (*p, 1);
		  tree new_op = build2 (COMPOUND_EXPR, TREE_TYPE (op), op,
					build_zero_cst (TREE_TYPE (op)));
		  TREE_OPERAND (*p, 1) = new_op;
		}
	    }
	}
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case ASSERTION_STMT:
    case PRECONDITION_STMT:
    case POSTCONDITION_STMT:
      {
	if (tree check = build_contract_check (stmt))
	  {
	    *stmt_p = check;
	    return cp_genericize_r (stmt_p, walk_subtrees, data);
	  }

	/* If we didn't build a check, replace it with void_node so we don't
	   leak contracts into GENERIC.  */
	*stmt_p = void_node;
	*walk_subtrees = 0;
      }
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_CONTEXT (using_directive) = current_function_decl;
		DECL_SOURCE_LOCATION (using_directive)
		  = cp_expr_loc_or_input_loc (stmt);

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				     cp_genericize_r, cp_walk_subtrees);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (warning_suppressed_p (stmt /* What warning?  */))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the only
		       case we need to solve is distribute parallel for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);
			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */

1958 case FOR_STMT:
1959 case WHILE_STMT:
1960 case DO_STMT:
1961 case SWITCH_STMT:
1962 case CONTINUE_STMT:
1963 case BREAK_STMT:
1964 case OMP_FOR:
1965 case OMP_SIMD:
1966 case OMP_LOOP:
1967 case OACC_LOOP:
1968 case STATEMENT_LIST:
1969 /* These cases are handled by shared code. */
1970 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1971 cp_genericize_r, cp_walk_subtrees);
1972 break;
1974 case BIT_CAST_EXPR:
1975 *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
1976 TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1977 break;
1979 default:
1980 if (IS_TYPE_OR_DECL_P (stmt))
1981 *walk_subtrees = 0;
1982 break;
1985 p_set->add (*stmt_p);
1987 return NULL;
1990 /* Lower C++ front end trees to GENERIC in T_P. */
1992 static void
1993 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1995 struct cp_genericize_data wtd;
1997 wtd.p_set = new hash_set<tree>;
1998 wtd.bind_expr_stack.create (0);
1999 wtd.omp_ctx = NULL;
2000 wtd.try_block = NULL_TREE;
2001 wtd.no_sanitize_p = false;
2002 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
2003 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
2004 delete wtd.p_set;
2005 if (sanitize_flags_p (SANITIZE_VPTR))
2006 cp_ubsan_instrument_member_accesses (t_p);
2009 /* If a non-void function doesn't obviously end with a return
2010 statement, add ubsan instrumentation code to verify it at
2011 runtime.  If -fsanitize=return is not enabled, instrument
2012 __builtin_unreachable instead.  */
2014 static void
2015 cp_maybe_instrument_return (tree fndecl)
2017 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
2018 || DECL_CONSTRUCTOR_P (fndecl)
2019 || DECL_DESTRUCTOR_P (fndecl)
2020 || !targetm.warn_func_return (fndecl))
2021 return;
2023 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
2024 /* Don't add __builtin_unreachable () if not optimizing; it will not
2025 enable any optimizations in that case, and will just break code that
2026 runs into the UB.  Don't add it with -fsanitize=unreachable
2027 -fno-sanitize=return either: UBSan covers this with
2028 ubsan_instrument_return above, where sufficient location information
2029 is provided, while the __builtin_unreachable () below, with return
2030 sanitization disabled, would just produce a hard-to-understand runtime error without a location.  */
2031 && ((!optimize && !flag_unreachable_traps)
2032 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
2033 return;
2035 tree t = DECL_SAVED_TREE (fndecl);
2036 while (t)
2038 switch (TREE_CODE (t))
2040 case BIND_EXPR:
2041 t = BIND_EXPR_BODY (t);
2042 continue;
2043 case TRY_FINALLY_EXPR:
2044 case CLEANUP_POINT_EXPR:
2045 t = TREE_OPERAND (t, 0);
2046 continue;
2047 case STATEMENT_LIST:
2049 tree_stmt_iterator i = tsi_last (t);
2050 while (!tsi_end_p (i))
2052 tree p = tsi_stmt (i);
2053 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
2054 break;
2055 tsi_prev (&i);
2057 if (!tsi_end_p (i))
2059 t = tsi_stmt (i);
2060 continue;
2063 break;
2064 case RETURN_EXPR:
2065 return;
2066 default:
2067 break;
2069 break;
2071 if (t == NULL_TREE)
2072 return;
2073 tree *p = &DECL_SAVED_TREE (fndecl);
2074 if (TREE_CODE (*p) == BIND_EXPR)
2075 p = &BIND_EXPR_BODY (*p);
2077 location_t loc = DECL_SOURCE_LOCATION (fndecl);
2078 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
2079 t = ubsan_instrument_return (loc);
2080 else
2081 t = build_builtin_unreachable (BUILTINS_LOCATION);
2083 append_to_statement_list (t, p);
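/* For example (a hypothetical input), in

     int f (int x) { if (x) return 1; }

   control can flow off the end of the non-void function f, so with
   -fsanitize=return a runtime diagnostic call is appended after the if,
   and otherwise a __builtin_unreachable () marker is appended.  */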
2086 void
2087 cp_genericize (tree fndecl)
2089 tree t;
2091 /* Fix up the types of parms passed by invisible reference. */
2092 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
2093 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
2095 /* If a function's arguments are copied to create a thunk,
2096 then DECL_BY_REFERENCE will be set -- but the type of the
2097 argument will be a pointer type, so we will never get
2098 here. */
2099 gcc_assert (!DECL_BY_REFERENCE (t));
2100 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
2101 TREE_TYPE (t) = DECL_ARG_TYPE (t);
2102 DECL_BY_REFERENCE (t) = 1;
2103 TREE_ADDRESSABLE (t) = 0;
2104 relayout_decl (t);
2107 /* Do the same for the return value. */
2108 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
2110 t = DECL_RESULT (fndecl);
2111 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
2112 DECL_BY_REFERENCE (t) = 1;
2113 TREE_ADDRESSABLE (t) = 0;
2114 relayout_decl (t);
2115 if (DECL_NAME (t))
2117 /* Adjust DECL_VALUE_EXPR of the original var. */
2118 tree outer = outer_curly_brace_block (current_function_decl);
2119 tree var;
2121 if (outer)
2122 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2123 if (VAR_P (var)
2124 && DECL_NAME (t) == DECL_NAME (var)
2125 && DECL_HAS_VALUE_EXPR_P (var)
2126 && DECL_VALUE_EXPR (var) == t)
2128 tree val = convert_from_reference (t);
2129 SET_DECL_VALUE_EXPR (var, val);
2130 break;
2135 /* If we're a clone, the body is already GIMPLE. */
2136 if (DECL_CLONED_FUNCTION_P (fndecl))
2137 return;
2139 /* Allow cp_genericize calls to be nested. */
2140 bc_state_t save_state;
2141 save_bc_state (&save_state);
2143 /* We do want to see every occurrence of the parms, so we can't just use
2144 walk_tree's hash functionality. */
2145 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
2147 cp_maybe_instrument_return (fndecl);
2149 /* Do everything else. */
2150 c_genericize (fndecl);
2151 restore_bc_state (&save_state);
2154 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2155 NULL if there is in fact nothing to do. ARG2 may be null if FN
2156 actually only takes one argument. */
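/* For array operands the statements built below amount to this sketch
   (p1, p2 and end1 name the temporaries created below):

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     [p2 = &arg2[0]...[0];]
   lab:
     fn (p1 [, p2] [, default args]);
     p1 += sizeof (element);  [p2 += sizeof (element);]
     if (p1 != end1) goto lab;  */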
2158 static tree
2159 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
2161 tree defparm, parm, t;
2162 int i = 0;
2163 int nargs;
2164 tree *argarray;
2166 if (fn == NULL)
2167 return NULL;
2169 nargs = list_length (DECL_ARGUMENTS (fn));
2170 argarray = XALLOCAVEC (tree, nargs);
2172 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
2173 if (arg2)
2174 defparm = TREE_CHAIN (defparm);
2176 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
2177 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
2179 tree inner_type = TREE_TYPE (arg1);
2180 tree start1, end1, p1;
2181 tree start2 = NULL, p2 = NULL;
2182 tree ret = NULL, lab;
2184 start1 = arg1;
2185 start2 = arg2;
2188 inner_type = TREE_TYPE (inner_type);
2189 start1 = build4 (ARRAY_REF, inner_type, start1,
2190 size_zero_node, NULL, NULL);
2191 if (arg2)
2192 start2 = build4 (ARRAY_REF, inner_type, start2,
2193 size_zero_node, NULL, NULL);
2195 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2196 start1 = build_fold_addr_expr_loc (input_location, start1);
2197 if (arg2)
2198 start2 = build_fold_addr_expr_loc (input_location, start2);
2200 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2201 end1 = fold_build_pointer_plus (start1, end1);
2203 p1 = create_tmp_var (TREE_TYPE (start1));
2204 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2205 append_to_statement_list (t, &ret);
2207 if (arg2)
2209 p2 = create_tmp_var (TREE_TYPE (start2));
2210 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2211 append_to_statement_list (t, &ret);
2214 lab = create_artificial_label (input_location);
2215 t = build1 (LABEL_EXPR, void_type_node, lab);
2216 append_to_statement_list (t, &ret);
2218 argarray[i++] = p1;
2219 if (arg2)
2220 argarray[i++] = p2;
2221 /* Handle default arguments. */
2222 for (parm = defparm; parm && parm != void_list_node;
2223 parm = TREE_CHAIN (parm), i++)
2224 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2225 TREE_PURPOSE (parm), fn,
2226 i - is_method, tf_warning_or_error);
2227 t = build_call_a (fn, i, argarray);
2228 t = fold_convert (void_type_node, t);
2229 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2230 append_to_statement_list (t, &ret);
2232 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2233 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2234 append_to_statement_list (t, &ret);
2236 if (arg2)
2238 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2239 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2240 append_to_statement_list (t, &ret);
2243 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2244 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2245 append_to_statement_list (t, &ret);
2247 return ret;
2249 else
2251 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2252 if (arg2)
2253 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2254 /* Handle default arguments. */
2255 for (parm = defparm; parm && parm != void_list_node;
2256 parm = TREE_CHAIN (parm), i++)
2257 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2258 TREE_PURPOSE (parm), fn,
2259 i - is_method, tf_warning_or_error);
2260 t = build_call_a (fn, i, argarray);
2261 t = fold_convert (void_type_node, t);
2262 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2266 /* Return code to initialize DECL with its default constructor, or
2267 NULL if there's nothing to do. */
2269 tree
2270 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2272 tree info = CP_OMP_CLAUSE_INFO (clause);
2273 tree ret = NULL;
2275 if (info)
2276 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2278 return ret;
2281 /* Return code to initialize DST with a copy constructor from SRC. */
2283 tree
2284 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2286 tree info = CP_OMP_CLAUSE_INFO (clause);
2287 tree ret = NULL;
2289 if (info)
2290 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2291 if (ret == NULL)
2292 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2294 return ret;
2297 /* Similarly, except use an assignment operator instead. */
2299 tree
2300 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2302 tree info = CP_OMP_CLAUSE_INFO (clause);
2303 tree ret = NULL;
2305 if (info)
2306 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2307 if (ret == NULL)
2308 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2310 return ret;
2313 /* Return code to destroy DECL. */
2315 tree
2316 cxx_omp_clause_dtor (tree clause, tree decl)
2318 tree info = CP_OMP_CLAUSE_INFO (clause);
2319 tree ret = NULL;
2321 if (info)
2322 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2324 return ret;
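/* An illustrative use of the hooks above (hypothetical input): for

     struct S { S (); S (const S &); ~S (); };
     S s;
     #pragma omp parallel firstprivate (s)

   CP_OMP_CLAUSE_INFO on the firstprivate clause records S's copy
   constructor and destructor, so each thread's private copy is
   copy-constructed from the original on entry and destroyed on
   exit.  */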
2327 /* True if OpenMP should privatize what this DECL points to rather
2328 than the DECL itself. */
2330 bool
2331 cxx_omp_privatize_by_reference (const_tree decl)
2333 return (TYPE_REF_P (TREE_TYPE (decl))
2334 || is_invisiref_parm (decl));
2337 /* Return true if DECL is a const-qualified var having no mutable member.  */
2338 bool
2339 cxx_omp_const_qual_no_mutable (tree decl)
2341 tree type = TREE_TYPE (decl);
2342 if (TYPE_REF_P (type))
2344 if (!is_invisiref_parm (decl))
2345 return false;
2346 type = TREE_TYPE (type);
2348 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2350 /* NRV doesn't preserve const qualification of the
2351 variable's type. */
2352 tree outer = outer_curly_brace_block (current_function_decl);
2353 tree var;
2355 if (outer)
2356 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2357 if (VAR_P (var)
2358 && DECL_NAME (decl) == DECL_NAME (var)
2359 && (TYPE_MAIN_VARIANT (type)
2360 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2362 if (TYPE_READONLY (TREE_TYPE (var)))
2363 type = TREE_TYPE (var);
2364 break;
2369 if (type == error_mark_node)
2370 return false;
2372 /* Variables with const-qualified type having no mutable member
2373 are predetermined shared. */
2374 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2375 return true;
2377 return false;
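/* For example, given hypothetical declarations

     const int c = 1;
     struct M { mutable int m; };
     const M cm;

   this predicate is true for c but false for cm, since cm's mutable
   member can still be written through the const object.  */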
2380 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2381 of DECL is predetermined. */
2383 enum omp_clause_default_kind
2384 cxx_omp_predetermined_sharing_1 (tree decl)
2386 /* Static data members are predetermined shared. */
2387 if (TREE_STATIC (decl))
2389 tree ctx = CP_DECL_CONTEXT (decl);
2390 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2391 return OMP_CLAUSE_DEFAULT_SHARED;
2393 if (c_omp_predefined_variable (decl))
2394 return OMP_CLAUSE_DEFAULT_SHARED;
2397 /* "this" may not be specified in data-sharing clauses; still we need
2398 to predetermine it firstprivate.  */
2399 if (decl == current_class_ptr)
2400 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2402 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
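/* An illustrative example (hypothetical input): in

     struct A { int m; void f () {
       #pragma omp parallel default(none)
       ++m;  } };

   the implicit use of "this" in ++m is predetermined firstprivate, so
   it is valid even though default(none) requires explicit data-sharing
   for ordinary variables.  */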
2405 /* Likewise, but also include the artificial vars.  We don't want to
2406 disallow mentioning the artificial vars in explicit clauses,
2407 as we use artificial vars e.g. for loop constructs with random
2408 access iterators other than pointers, but during gimplification
2409 we want to treat them as predetermined. */
2411 enum omp_clause_default_kind
2412 cxx_omp_predetermined_sharing (tree decl)
2414 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2415 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2416 return ret;
2418 /* Predetermine artificial variables holding integral values; those
2419 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2420 gimplification.  */
2421 if (VAR_P (decl)
2422 && DECL_ARTIFICIAL (decl)
2423 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2424 && !(DECL_LANG_SPECIFIC (decl)
2425 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2426 return OMP_CLAUSE_DEFAULT_SHARED;
2428 /* Similarly for typeinfo symbols. */
2429 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2430 return OMP_CLAUSE_DEFAULT_SHARED;
2432 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2435 enum omp_clause_defaultmap_kind
2436 cxx_omp_predetermined_mapping (tree decl)
2438 /* Predetermine artificial variables holding integral values; those
2439 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2440 gimplification.  */
2441 if (VAR_P (decl)
2442 && DECL_ARTIFICIAL (decl)
2443 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2444 && !(DECL_LANG_SPECIFIC (decl)
2445 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2446 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2448 if (c_omp_predefined_variable (decl))
2449 return OMP_CLAUSE_DEFAULTMAP_TO;
2451 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2454 /* Finalize an implicitly determined clause. */
2456 void
2457 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2459 tree decl, inner_type;
2460 bool make_shared = false;
2462 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2463 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2464 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2465 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2466 return;
2468 decl = OMP_CLAUSE_DECL (c);
2469 decl = require_complete_type (decl);
2470 inner_type = TREE_TYPE (decl);
2471 if (decl == error_mark_node)
2472 make_shared = true;
2473 else if (TYPE_REF_P (TREE_TYPE (decl)))
2474 inner_type = TREE_TYPE (inner_type);
2476 /* We're interested in the base element, not arrays. */
2477 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2478 inner_type = TREE_TYPE (inner_type);
2480 /* Check for special function availability by building a call to one.
2481 Save the results, because later we won't be in the right context
2482 for making these queries. */
2483 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2484 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2485 if (!make_shared
2486 && CLASS_TYPE_P (inner_type)
2487 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2488 true))
2489 make_shared = true;
2491 if (make_shared)
2493 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2494 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2495 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2499 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2500 disregarded in an OpenMP construct, because it is going to be
2501 remapped during OpenMP lowering. SHARED is true if DECL
2502 is going to be shared, false if it is going to be privatized. */
2504 bool
2505 cxx_omp_disregard_value_expr (tree decl, bool shared)
2507 if (shared)
2508 return false;
2509 if (VAR_P (decl)
2510 && DECL_HAS_VALUE_EXPR_P (decl)
2511 && DECL_ARTIFICIAL (decl)
2512 && DECL_LANG_SPECIFIC (decl)
2513 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2514 return true;
2515 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2516 return true;
2517 return false;
2520 /* Fold expression X which is used as an rvalue if RVAL is true. */
2522 static tree
2523 cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
2525 while (true)
2527 x = cp_fold (x, flags);
2528 if (rval)
2529 x = mark_rvalue_use (x);
2530 if (rval && DECL_P (x)
2531 && !TYPE_REF_P (TREE_TYPE (x)))
2533 tree v = decl_constant_value (x);
2534 if (v != x && v != error_mark_node)
2536 x = v;
2537 continue;
2540 break;
2542 return x;
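/* For example, given a hypothetical "const int n = 42;", an rvalue use
   of n is replaced by its constant initializer 42 via
   decl_constant_value in the loop above, and the replacement is then
   folded again in turn.  */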
2545 tree
2546 cp_fold_maybe_rvalue (tree x, bool rval)
2548 return cp_fold_maybe_rvalue (x, rval, ff_none);
2551 /* Fold expression X which is used as an rvalue. */
2553 static tree
2554 cp_fold_rvalue (tree x, fold_flags_t flags)
2556 return cp_fold_maybe_rvalue (x, true, flags);
2559 tree
2560 cp_fold_rvalue (tree x)
2562 return cp_fold_rvalue (x, ff_none);
2565 /* Perform folding on expression X. */
2567 static tree
2568 cp_fully_fold (tree x, mce_value manifestly_const_eval)
2570 if (processing_template_decl)
2571 return x;
2572 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2573 have to call both. */
2574 if (cxx_dialect >= cxx11)
2576 x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
2577 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2578 a TARGET_EXPR; undo that here. */
2579 if (TREE_CODE (x) == TARGET_EXPR)
2580 x = TARGET_EXPR_INITIAL (x);
2581 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2582 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2583 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2584 x = TREE_OPERAND (x, 0);
2586 fold_flags_t flags = ff_none;
2587 if (manifestly_const_eval == mce_false)
2588 flags |= ff_mce_false;
2589 return cp_fold_rvalue (x, flags);
2592 tree
2593 cp_fully_fold (tree x)
2595 return cp_fully_fold (x, mce_unknown);
2598 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2599 in some cases. */
2601 tree
2602 cp_fully_fold_init (tree x)
2604 if (processing_template_decl)
2605 return x;
2606 x = cp_fully_fold (x, mce_false);
2607 cp_fold_data data (ff_mce_false);
2608 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2609 return x;
2612 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2613 and certain changes are made to the folding done. Or should be (FIXME). We
2614 never touch maybe_const, as it is only used for the C front-end
2615 C_MAYBE_CONST_EXPR. */
2617 tree
2618 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2620 return cp_fold_maybe_rvalue (x, !lval);
2623 static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
2625 /* Subroutine of cp_fold. Returns which fold cache to use according
2626 to the given flags. We need multiple caches since the result of
2627 folding may depend on which flags are used. */
2629 static hash_map<tree, tree> *&
2630 get_fold_cache (fold_flags_t flags)
2632 if (flags & ff_mce_false)
2633 return fold_caches[1];
2634 else
2635 return fold_caches[0];
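/* For example, __builtin_is_constant_evaluated () folds to false only
   when ff_mce_false is set (see cp_fold below), so a result cached
   under one flag set must not be returned for a lookup made under the
   other; hence the two caches.  */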
2638 /* Dispose of all of the fold caches.  */
2640 void
2641 clear_fold_cache (void)
2643 for (auto& fold_cache : fold_caches)
2644 if (fold_cache != NULL)
2645 fold_cache->empty ();
2648 /* This function tries to fold an expression X.
2649 To avoid combinatorial explosion, folding results are kept in fold_cache.
2650 If X is invalid, we don't fold at all.
2651 For performance reasons we don't cache expressions representing a
2652 declaration or constant.
2653 Returns X or its folded variant.  */
2655 static tree
2656 cp_fold (tree x, fold_flags_t flags)
2658 tree op0, op1, op2, op3;
2659 tree org_x = x, r = NULL_TREE;
2660 enum tree_code code;
2661 location_t loc;
2662 bool rval_ops = true;
2664 if (!x || x == error_mark_node)
2665 return x;
2667 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2668 return x;
2670 /* Don't bother to cache DECLs or constants. */
2671 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2672 return x;
2674 auto& fold_cache = get_fold_cache (flags);
2675 if (fold_cache == NULL)
2676 fold_cache = hash_map<tree, tree>::create_ggc (101);
2678 if (tree *cached = fold_cache->get (x))
2679 return *cached;
2681 uid_sensitive_constexpr_evaluation_checker c;
2683 code = TREE_CODE (x);
2684 switch (code)
2686 case CLEANUP_POINT_EXPR:
2687 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2688 effects. */
2689 r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2690 if (!TREE_SIDE_EFFECTS (r))
2691 x = r;
2692 break;
2694 case SIZEOF_EXPR:
2695 x = fold_sizeof_expr (x);
2696 break;
2698 case VIEW_CONVERT_EXPR:
2699 rval_ops = false;
2700 /* FALLTHRU */
2701 case NON_LVALUE_EXPR:
2702 CASE_CONVERT:
2704 if (VOID_TYPE_P (TREE_TYPE (x)))
2706 /* This is just to make sure we don't end up with casts to
2707 void from error_mark_node. If we just return x, then
2708 cp_fold_r might fold the operand into error_mark_node and
2709 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2710 during gimplification doesn't like such casts.
2711 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2712 folding of the operand should be in the caches, and if called
2713 from cp_fold_r it will be modified in place.  */
2714 op0 = cp_fold (TREE_OPERAND (x, 0), flags);
2715 if (op0 == error_mark_node)
2716 x = error_mark_node;
2717 break;
2720 loc = EXPR_LOCATION (x);
2721 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2723 if (code == CONVERT_EXPR
2724 && SCALAR_TYPE_P (TREE_TYPE (x))
2725 && op0 != void_node)
2726 /* During parsing we used convert_to_*_nofold; re-convert now using the
2727 folding variants, since fold() doesn't do those transformations. */
2728 x = fold (convert (TREE_TYPE (x), op0));
2729 else if (op0 != TREE_OPERAND (x, 0))
2731 if (op0 == error_mark_node)
2732 x = error_mark_node;
2733 else
2734 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2736 else
2737 x = fold (x);
2739 /* Conversion of an out-of-range value has implementation-defined
2740 behavior; the language considers it different from arithmetic
2741 overflow, which is undefined. */
2742 if (TREE_CODE (op0) == INTEGER_CST
2743 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2744 TREE_OVERFLOW (x) = false;
2746 break;
2748 case EXCESS_PRECISION_EXPR:
2749 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2750 x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
2751 break;
2753 case INDIRECT_REF:
2754 /* We don't need the decltype(auto) obfuscation anymore. */
2755 if (REF_PARENTHESIZED_P (x))
2757 tree p = maybe_undo_parenthesized_ref (x);
2758 if (p != x)
2759 return cp_fold (p, flags);
2761 goto unary;
2763 case ADDR_EXPR:
2764 loc = EXPR_LOCATION (x);
2765 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
2767 /* Cope with user tricks that amount to offsetof. */
2768 if (op0 != error_mark_node
2769 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2771 tree val = get_base_address (op0);
2772 if (val
2773 && INDIRECT_REF_P (val)
2774 && COMPLETE_TYPE_P (TREE_TYPE (val))
2775 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2777 val = TREE_OPERAND (val, 0);
2778 STRIP_NOPS (val);
2779 val = maybe_constant_value (val);
2780 if (TREE_CODE (val) == INTEGER_CST)
2781 return fold_offsetof (op0, TREE_TYPE (x));
2784 goto finish_unary;
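/* An example of such a trick (a pre-offsetof idiom, hypothetical
   input, not from the sources): "(size_t) &((struct S *) 0)->m" takes
   the address of a member of an object at a constant address, which
   the code above folds to the member's constant byte offset.  */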
2786 case REALPART_EXPR:
2787 case IMAGPART_EXPR:
2788 rval_ops = false;
2789 /* FALLTHRU */
2790 case CONJ_EXPR:
2791 case FIX_TRUNC_EXPR:
2792 case FLOAT_EXPR:
2793 case NEGATE_EXPR:
2794 case ABS_EXPR:
2795 case ABSU_EXPR:
2796 case BIT_NOT_EXPR:
2797 case TRUTH_NOT_EXPR:
2798 case FIXED_CONVERT_EXPR:
2799 unary:
2801 loc = EXPR_LOCATION (x);
2802 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2804 finish_unary:
2805 if (op0 != TREE_OPERAND (x, 0))
2807 if (op0 == error_mark_node)
2808 x = error_mark_node;
2809 else
2811 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2812 if (code == INDIRECT_REF
2813 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2815 TREE_READONLY (x) = TREE_READONLY (org_x);
2816 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2817 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2821 else
2822 x = fold (x);
2824 gcc_assert (TREE_CODE (x) != COND_EXPR
2825 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2826 break;
2828 case UNARY_PLUS_EXPR:
2829 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2830 if (op0 == error_mark_node)
2831 x = error_mark_node;
2832 else
2833 x = fold_convert (TREE_TYPE (x), op0);
2834 break;
2836 case POSTDECREMENT_EXPR:
2837 case POSTINCREMENT_EXPR:
2838 case INIT_EXPR:
2839 case PREDECREMENT_EXPR:
2840 case PREINCREMENT_EXPR:
2841 case COMPOUND_EXPR:
2842 case MODIFY_EXPR:
2843 rval_ops = false;
2844 /* FALLTHRU */
2845 case POINTER_PLUS_EXPR:
2846 case PLUS_EXPR:
2847 case POINTER_DIFF_EXPR:
2848 case MINUS_EXPR:
2849 case MULT_EXPR:
2850 case TRUNC_DIV_EXPR:
2851 case CEIL_DIV_EXPR:
2852 case FLOOR_DIV_EXPR:
2853 case ROUND_DIV_EXPR:
2854 case TRUNC_MOD_EXPR:
2855 case CEIL_MOD_EXPR:
2856 case ROUND_MOD_EXPR:
2857 case RDIV_EXPR:
2858 case EXACT_DIV_EXPR:
2859 case MIN_EXPR:
2860 case MAX_EXPR:
2861 case LSHIFT_EXPR:
2862 case RSHIFT_EXPR:
2863 case LROTATE_EXPR:
2864 case RROTATE_EXPR:
2865 case BIT_AND_EXPR:
2866 case BIT_IOR_EXPR:
2867 case BIT_XOR_EXPR:
2868 case TRUTH_AND_EXPR:
2869 case TRUTH_ANDIF_EXPR:
2870 case TRUTH_OR_EXPR:
2871 case TRUTH_ORIF_EXPR:
2872 case TRUTH_XOR_EXPR:
2873 case LT_EXPR: case LE_EXPR:
2874 case GT_EXPR: case GE_EXPR:
2875 case EQ_EXPR: case NE_EXPR:
2876 case UNORDERED_EXPR: case ORDERED_EXPR:
2877 case UNLT_EXPR: case UNLE_EXPR:
2878 case UNGT_EXPR: case UNGE_EXPR:
2879 case UNEQ_EXPR: case LTGT_EXPR:
2880 case RANGE_EXPR: case COMPLEX_EXPR:
2882 loc = EXPR_LOCATION (x);
2883 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2884 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
2886 /* decltype(nullptr) has only one value, so optimize away all comparisons
2887 with that type right away; keeping them in the IL causes trouble for
2888 various optimizations.  */
2889 if (COMPARISON_CLASS_P (org_x)
2890 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2891 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2893 switch (code)
2895 case EQ_EXPR:
2896 x = constant_boolean_node (true, TREE_TYPE (x));
2897 break;
2898 case NE_EXPR:
2899 x = constant_boolean_node (false, TREE_TYPE (x));
2900 break;
2901 default:
2902 gcc_unreachable ();
2904 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2905 op0, op1);
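/* E.g. for "f () == nullptr" with a hypothetical
   "decltype(nullptr) f ();", the comparison folds to true here, while
   omit_two_operands_loc keeps the side effects of the call to f in
   the IL.  */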
2908 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2910 if (op0 == error_mark_node || op1 == error_mark_node)
2911 x = error_mark_node;
2912 else
2913 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2915 else
2916 x = fold (x);
2918 /* This is only needed for -Wnonnull-compare and only if
2919 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2920 generation, we do it always. */
2921 if (COMPARISON_CLASS_P (org_x))
2923 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2925 else if (COMPARISON_CLASS_P (x))
2927 if (warn_nonnull_compare
2928 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2929 suppress_warning (x, OPT_Wnonnull_compare);
2931 /* Otherwise give up on optimizing these; let the GIMPLE folders
2932 optimize them later on.  */
2933 else if (op0 != TREE_OPERAND (org_x, 0)
2934 || op1 != TREE_OPERAND (org_x, 1))
2936 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2937 if (warn_nonnull_compare
2938 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2939 suppress_warning (x, OPT_Wnonnull_compare);
2941 else
2942 x = org_x;
2945 break;
2947 case VEC_COND_EXPR:
2948 case COND_EXPR:
2949 loc = EXPR_LOCATION (x);
2950 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2951 op1 = cp_fold (TREE_OPERAND (x, 1), flags);
2952 op2 = cp_fold (TREE_OPERAND (x, 2), flags);
2954 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2956 warning_sentinel s (warn_int_in_bool_context);
2957 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2958 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2959 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2960 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2962 else if (VOID_TYPE_P (TREE_TYPE (x)))
2964 if (TREE_CODE (op0) == INTEGER_CST)
2966 /* If the condition is constant, fold can fold away
2967 the COND_EXPR.  Some statement-level uses of COND_EXPR
2968 have one of the branches NULL, so avoid crashing while folding.  */
2969 if (!op1)
2970 op1 = build_empty_stmt (loc);
2971 if (!op2)
2972 op2 = build_empty_stmt (loc);
2974 else
2976 /* Otherwise, don't bother folding a void condition, since
2977 it can't produce a constant value. */
2978 if (op0 != TREE_OPERAND (x, 0)
2979 || op1 != TREE_OPERAND (x, 1)
2980 || op2 != TREE_OPERAND (x, 2))
2981 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2982 break;
2986 if (op0 != TREE_OPERAND (x, 0)
2987 || op1 != TREE_OPERAND (x, 1)
2988 || op2 != TREE_OPERAND (x, 2))
2990 if (op0 == error_mark_node
2991 || op1 == error_mark_node
2992 || op2 == error_mark_node)
2993 x = error_mark_node;
2994 else
2995 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2997 else
2998 x = fold (x);
3000 /* A COND_EXPR might have incompatible types in branches if one or both
3001 arms are bitfields. If folding exposed such a branch, fix it up. */
3002 if (TREE_CODE (x) != code
3003 && x != error_mark_node
3004 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
3005 x = fold_convert (TREE_TYPE (org_x), x);
3007 break;
3009 case CALL_EXPR:
3011 tree callee = get_callee_fndecl (x);
3013 /* "Inline" calls to std::move/forward and other cast-like functions
3014 by simply folding them into a corresponding cast to their return
3015 type. This is cheaper than relying on the middle end to do so, and
3016 also means we avoid generating useless debug info for them at all.
3018 At this point the argument has already been converted into a
3019 reference, so it suffices to use a NOP_EXPR to express the
3020 cast. */
3021 if ((OPTION_SET_P (flag_fold_simple_inlines)
3022 ? flag_fold_simple_inlines
3023 : !flag_no_inline)
3024 && call_expr_nargs (x) == 1
3025 && decl_in_std_namespace_p (callee)
3026 && DECL_NAME (callee) != NULL_TREE
3027 && (id_equal (DECL_NAME (callee), "move")
3028 || id_equal (DECL_NAME (callee), "forward")
3029 || id_equal (DECL_NAME (callee), "addressof")
3030 /* This addressof equivalent is used heavily in libstdc++. */
3031 || id_equal (DECL_NAME (callee), "__addressof")
3032 || id_equal (DECL_NAME (callee), "as_const")))
3034 r = CALL_EXPR_ARG (x, 0);
3035 /* Check that the return and argument types are sane before
3036 folding. */
3037 if (INDIRECT_TYPE_P (TREE_TYPE (x))
3038 && INDIRECT_TYPE_P (TREE_TYPE (r)))
3040 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
3041 r = build_nop (TREE_TYPE (x), r);
3042 x = cp_fold (r, flags);
3043 break;
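/* E.g. a call "std::move (x)" with x of type T is folded here to the
   equivalent of "static_cast<T &&> (x)", i.e. a bare NOP_EXPR, so no
   out-of-line call or debug info is generated for it.  */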
3047 int sv = optimize, nw = sv;
3049 /* Some built-in function calls will be evaluated at compile-time in
3050 fold (). Set optimize to 1 when folding __builtin_constant_p inside
3051 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3052 if (callee && fndecl_built_in_p (callee) && !optimize
3053 && DECL_IS_BUILTIN_CONSTANT_P (callee)
3054 && current_function_decl
3055 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
3056 nw = 1;
3058 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
3060 iloc_sentinel ils (EXPR_LOCATION (x));
3061 switch (DECL_FE_FUNCTION_CODE (callee))
3063 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
3064 /* Defer folding __builtin_is_constant_evaluated unless
3065 we know this isn't a manifestly constant-evaluated
3066 context. */
3067 if (flags & ff_mce_false)
3068 x = boolean_false_node;
3069 break;
3070 case CP_BUILT_IN_SOURCE_LOCATION:
3071 x = fold_builtin_source_location (x);
3072 break;
3073 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
3074 x = fold_builtin_is_corresponding_member
3075 (EXPR_LOCATION (x), call_expr_nargs (x),
3076 &CALL_EXPR_ARG (x, 0));
3077 break;
3078 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
3079 x = fold_builtin_is_pointer_inverconvertible_with_class
3080 (EXPR_LOCATION (x), call_expr_nargs (x),
3081 &CALL_EXPR_ARG (x, 0));
3082 break;
3083 default:
3084 break;
3086 break;
3089 if (callee
3090 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
3091 BUILT_IN_FRONTEND))
3093 x = fold_builtin_source_location (x);
3094 break;
3097 bool changed = false;
3098 int m = call_expr_nargs (x);
3099 for (int i = 0; i < m; i++)
3101 r = cp_fold (CALL_EXPR_ARG (x, i), flags);
3102 if (r != CALL_EXPR_ARG (x, i))
3104 if (r == error_mark_node)
3106 x = error_mark_node;
3107 break;
3109 if (!changed)
3110 x = copy_node (x);
3111 CALL_EXPR_ARG (x, i) = r;
3112 changed = true;
3115 if (x == error_mark_node)
3116 break;
3118 optimize = nw;
3119 r = fold (x);
3120 optimize = sv;
3122 if (TREE_CODE (r) != CALL_EXPR)
3124 x = cp_fold (r, flags);
3125 break;
3128 optimize = nw;
3130 /* Invoke maybe_constant_value for functions declared
3131 constexpr and not called with AGGR_INIT_EXPRs.
3132 TODO:
3133 Do constexpr expansion of expressions where the call itself is not
3134 constant, but the call followed by an INDIRECT_REF is. */
3135 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3136 && !flag_no_inline)
3138 mce_value manifestly_const_eval = mce_unknown;
3139 if (flags & ff_mce_false)
3140 /* Allow folding __builtin_is_constant_evaluated to false during
3141 constexpr evaluation of this call. */
3142 manifestly_const_eval = mce_false;
3143 r = maybe_constant_value (x, /*decl=*/NULL_TREE,
3144 manifestly_const_eval);
3146 optimize = sv;
3148 if (TREE_CODE (r) != CALL_EXPR)
3150 if (DECL_CONSTRUCTOR_P (callee))
3152 loc = EXPR_LOCATION (x);
3153 tree s = build_fold_indirect_ref_loc (loc,
3154 CALL_EXPR_ARG (x, 0));
3155 r = cp_build_init_expr (s, r);
3157 x = r;
3158 break;
3161 break;
3164 case CONSTRUCTOR:
3166 unsigned i;
3167 constructor_elt *p;
3168 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3169 vec<constructor_elt, va_gc> *nelts = NULL;
3170 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3172 tree op = cp_fold (p->value, flags);
3173 if (op != p->value)
3175 if (op == error_mark_node)
3177 x = error_mark_node;
3178 vec_free (nelts);
3179 break;
3181 if (nelts == NULL)
3182 nelts = elts->copy ();
3183 (*nelts)[i].value = op;
3186 if (nelts)
3188 x = build_constructor (TREE_TYPE (x), nelts);
3189 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3190 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3191 CONSTRUCTOR_MUTABLE_POISON (x)
3192 = CONSTRUCTOR_MUTABLE_POISON (org_x);
3194 if (VECTOR_TYPE_P (TREE_TYPE (x)))
3195 x = fold (x);
3196 break;
3198 case TREE_VEC:
3200 bool changed = false;
3201 int n = TREE_VEC_LENGTH (x);
3203 for (int i = 0; i < n; i++)
3205 tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
3206 if (op != TREE_VEC_ELT (x, i))
3208 if (!changed)
3209 x = copy_node (x);
3210 TREE_VEC_ELT (x, i) = op;
3211 changed = true;
3216 break;
3218 case ARRAY_REF:
3219 case ARRAY_RANGE_REF:
3221 loc = EXPR_LOCATION (x);
3222 op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3223 op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3224 op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3225 op3 = cp_fold (TREE_OPERAND (x, 3), flags);
3227 if (op0 != TREE_OPERAND (x, 0)
3228 || op1 != TREE_OPERAND (x, 1)
3229 || op2 != TREE_OPERAND (x, 2)
3230 || op3 != TREE_OPERAND (x, 3))
3232 if (op0 == error_mark_node
3233 || op1 == error_mark_node
3234 || op2 == error_mark_node
3235 || op3 == error_mark_node)
3236 x = error_mark_node;
3237 else
3239 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3240 TREE_READONLY (x) = TREE_READONLY (org_x);
3241 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3242 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3246 x = fold (x);
3247 break;
3249 case SAVE_EXPR:
3250 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3251 folding, evaluates to an invariant.  In that case there is no need
3252 to wrap the folded tree in a SAVE_EXPR.  */
3253 r = cp_fold (TREE_OPERAND (x, 0), flags);
3254 if (tree_invariant_p (r))
3255 x = r;
3256 break;
3258 case REQUIRES_EXPR:
3259 x = evaluate_requires_expr (x);
3260 break;
3262 default:
3263 return org_x;
3266 if (EXPR_P (x) && TREE_CODE (x) == code)
3268 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3269 copy_warning (x, org_x);
3272 if (!c.evaluation_restricted_p ())
3274 fold_cache->put (org_x, x);
3275 /* Avoid trying to fold an already folded result again.  */
3276 if (x != org_x)
3277 fold_cache->put (x, x);
3280 return x;
3283 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3285 tree
3286 lookup_hotness_attribute (tree list)
3288 for (; list; list = TREE_CHAIN (list))
3290 tree name = get_attribute_name (list);
3291 if ((is_attribute_p ("hot", name)
3292 || is_attribute_p ("cold", name)
3293 || is_attribute_p ("likely", name)
3294 || is_attribute_p ("unlikely", name))
3295 && is_attribute_namespace_p ("", list))
3296 break;
3298 return list;
3301 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3303 static tree
3304 remove_hotness_attribute (tree list)
3306 for (tree *p = &list; *p; )
3308 tree l = *p;
3309 tree name = get_attribute_name (l);
3310 if ((is_attribute_p ("hot", name)
3311 || is_attribute_p ("cold", name)
3312 || is_attribute_p ("likely", name)
3313 || is_attribute_p ("unlikely", name))
3314 && is_attribute_namespace_p ("", l))
3316 *p = TREE_CHAIN (l);
3317 continue;
3319 p = &TREE_CHAIN (l);
3321 return list;
3324 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3325 PREDICT_EXPR. */
3327 tree
3328 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3330 if (std_attrs == error_mark_node)
3331 return std_attrs;
3332 if (tree attr = lookup_hotness_attribute (std_attrs))
3334 tree name = get_attribute_name (attr);
3335 bool hot = (is_attribute_p ("hot", name)
3336 || is_attribute_p ("likely", name));
3337 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3338 hot ? TAKEN : NOT_TAKEN);
3339 SET_EXPR_LOCATION (pred, attrs_loc);
3340 add_stmt (pred);
3341 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3342 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3343 get_attribute_name (other), name);
3344 std_attrs = remove_hotness_attribute (std_attrs);
3346 return std_attrs;
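/* For example (a hypothetical input),

     if (err) [[unlikely]] handle_error ();

   adds a PREDICT_EXPR with PRED_COLD_LABEL/NOT_TAKEN ahead of the
   guarded statement, steering branch prediction and block layout
   heuristics.  */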
3349 /* Build IFN_ASSUME internal call for assume condition ARG. */
3351 tree
3352 build_assume_call (location_t loc, tree arg)
3354 if (!processing_template_decl)
3355 arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
3356 return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
3357 1, arg);
3360 /* If [[assume (cond)]] appears on this statement, handle it. */
3362 tree
3363 process_stmt_assume_attribute (tree std_attrs, tree statement,
3364 location_t attrs_loc)
3366 if (std_attrs == error_mark_node)
3367 return std_attrs;
3368 tree attr = lookup_attribute ("gnu", "assume", std_attrs);
3369 if (!attr)
3370 return std_attrs;
3371 /* The next token after the assume attribute is not ';'. */
3372 if (statement)
3374 warning_at (attrs_loc, OPT_Wattributes,
3375 "%<assume%> attribute not followed by %<;%>");
3376 attr = NULL_TREE;
3378 for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
3380 tree args = TREE_VALUE (attr);
3381 if (args && PACK_EXPANSION_P (args))
3383 auto_diagnostic_group d;
3384 error_at (attrs_loc, "pack expansion of %qE attribute",
3385 get_attribute_name (attr));
3386 if (cxx_dialect >= cxx17)
3387 inform (attrs_loc, "use fold expression in the attribute "
3388 "argument instead");
3389 continue;
3391 int nargs = list_length (args);
3392 if (nargs != 1)
3394 auto_diagnostic_group d;
3395 error_at (attrs_loc, "wrong number of arguments specified for "
3396 "%qE attribute", get_attribute_name (attr));
3397 inform (attrs_loc, "expected %i, found %i", 1, nargs);
3399 else
3401 tree arg = TREE_VALUE (args);
3402 if (!type_dependent_expression_p (arg))
3403 arg = contextual_conv_bool (arg, tf_warning_or_error);
3404 if (error_operand_p (arg))
3405 continue;
3406 finish_expr_stmt (build_assume_call (attrs_loc, arg));
3409 return remove_attribute ("gnu", "assume", std_attrs);
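/* For example (a hypothetical input),

     [[assume (x > 0)]];

   is lowered to an IFN_ASSUME internal call built by build_assume_call
   above; optimizers may use the condition without ever evaluating it
   at run time.  */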
3412 /* Return the type std::source_location::__impl after performing
3413 verification on it. */
3415 tree
3416 get_source_location_impl_type ()
3418 tree name = get_identifier ("source_location");
3419 tree decl = lookup_qualified_name (std_node, name);
3420 if (TREE_CODE (decl) != TYPE_DECL)
3422 auto_diagnostic_group d;
3423 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3424 qualified_name_lookup_error (std_node, name, decl, input_location);
3425 else
3426 error ("%qD is not a type", decl);
3427 return error_mark_node;
3429 name = get_identifier ("__impl");
3430 tree type = TREE_TYPE (decl);
3431 decl = lookup_qualified_name (type, name);
3432 if (TREE_CODE (decl) != TYPE_DECL)
3434 auto_diagnostic_group d;
3435 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3436 qualified_name_lookup_error (type, name, decl, input_location);
3437 else
3438 error ("%qD is not a type", decl);
3439 return error_mark_node;
3441 type = TREE_TYPE (decl);
3442 if (TREE_CODE (type) != RECORD_TYPE)
3444 error ("%qD is not a class type", decl);
3445 return error_mark_node;
3448 int cnt = 0;
3449 for (tree field = TYPE_FIELDS (type);
3450 (field = next_aggregate_field (field)) != NULL_TREE;
3451 field = DECL_CHAIN (field))
3453 if (DECL_NAME (field) != NULL_TREE)
3455 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3456 if (strcmp (n, "_M_file_name") == 0
3457 || strcmp (n, "_M_function_name") == 0)
3459 if (TREE_TYPE (field) != const_string_type_node)
3461 error ("%qD does not have %<const char *%> type", field);
3462 return error_mark_node;
3464 cnt++;
3465 continue;
3467 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3469 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3471 error ("%qD does not have integral type", field);
3472 return error_mark_node;
3474 cnt++;
3475 continue;
3478 cnt = 0;
3479 break;
3481 if (cnt != 4)
3483 error ("%<std::source_location::__impl%> does not contain only "
3484 "non-static data members %<_M_file_name%>, "
3485 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3486 return error_mark_node;
3488 return build_qualified_type (type, TYPE_QUAL_CONST);
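/* The layout being verified corresponds to this sketch (the exact
   integer types may vary between library versions):

     struct source_location::__impl {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line, _M_column;
     };  */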
3491 /* Type for the source_location_table hash table.  */
3492 struct GTY((for_user)) source_location_table_entry {
3493 location_t loc;
3494 unsigned uid;
3495 tree var;
3498 /* Hash traits class for the source location table below.  */
3500 struct source_location_table_entry_hash
3501 : ggc_remove <source_location_table_entry>
3503 typedef source_location_table_entry value_type;
3504 typedef source_location_table_entry compare_type;
3506 static hashval_t
3507 hash (const source_location_table_entry &ref)
3509 inchash::hash hstate (0);
3510 hstate.add_int (ref.loc);
3511 hstate.add_int (ref.uid);
3512 return hstate.end ();
3515 static bool
3516 equal (const source_location_table_entry &ref1,
3517 const source_location_table_entry &ref2)
3519 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3522 static void
3523 mark_deleted (source_location_table_entry &ref)
3525 ref.loc = UNKNOWN_LOCATION;
3526 ref.uid = -1U;
3527 ref.var = NULL_TREE;
3530 static const bool empty_zero_p = true;
3532 static void
3533 mark_empty (source_location_table_entry &ref)
3535 ref.loc = UNKNOWN_LOCATION;
3536 ref.uid = 0;
3537 ref.var = NULL_TREE;
3540 static bool
3541 is_deleted (const source_location_table_entry &ref)
3543 return (ref.loc == UNKNOWN_LOCATION
3544 && ref.uid == -1U
3545 && ref.var == NULL_TREE);
3548 static bool
3549 is_empty (const source_location_table_entry &ref)
3551 return (ref.loc == UNKNOWN_LOCATION
3552 && ref.uid == 0
3553 && ref.var == NULL_TREE);
3556 static void
3557 pch_nx (source_location_table_entry &p)
3559 extern void gt_pch_nx (source_location_table_entry &);
3560 gt_pch_nx (p);
3563 static void
3564 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3566 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3567 void *);
3568 gt_pch_nx (&p, op, cookie);
3572 static GTY(()) hash_table <source_location_table_entry_hash>
3573 *source_location_table;
3574 static GTY(()) unsigned int source_location_id;
3576 /* Fold the __builtin_source_location () call T. */
3578 tree
3579 fold_builtin_source_location (const_tree t)
3581 gcc_assert (TREE_CODE (t) == CALL_EXPR);
3582 /* TREE_TYPE (t) is const std::source_location::__impl* */
3583 tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
3584 if (source_location_impl == error_mark_node)
3585 return build_zero_cst (const_ptr_type_node);
3586 gcc_assert (CLASS_TYPE_P (source_location_impl)
3587 && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));
3589 location_t loc = EXPR_LOCATION (t);
3590 if (source_location_table == NULL)
3591 source_location_table
3592 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3593 const line_map_ordinary *map;
3594 source_location_table_entry entry;
3595 entry.loc
3596 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3597 &map);
3598 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3599 entry.var = error_mark_node;
3600 source_location_table_entry *entryp
3601 = source_location_table->find_slot (entry, INSERT);
3602 tree var;
3603 if (entryp->var)
3604 var = entryp->var;
3605 else
3607 char tmp_name[32];
3608 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3609 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3610 source_location_impl);
3611 TREE_STATIC (var) = 1;
3612 TREE_PUBLIC (var) = 0;
3613 DECL_ARTIFICIAL (var) = 1;
3614 DECL_IGNORED_P (var) = 1;
3615 DECL_EXTERNAL (var) = 0;
3616 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3617 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3618 layout_decl (var, 0);
3620 vec<constructor_elt, va_gc> *v = NULL;
3621 vec_alloc (v, 4);
3622 for (tree field = TYPE_FIELDS (source_location_impl);
3623 (field = next_aggregate_field (field)) != NULL_TREE;
3624 field = DECL_CHAIN (field))
3626 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3627 tree val = NULL_TREE;
3628 if (strcmp (n, "_M_file_name") == 0)
3630 if (const char *fname = LOCATION_FILE (loc))
3632 fname = remap_macro_filename (fname);
3633 val = build_string_literal (fname);
3635 else
3636 val = build_string_literal ("");
3638 else if (strcmp (n, "_M_function_name") == 0)
3640 const char *name = "";
3642 if (current_function_decl)
3643 name = cxx_printable_name (current_function_decl, 2);
3645 val = build_string_literal (name);
3647 else if (strcmp (n, "_M_line") == 0)
3648 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3649 else if (strcmp (n, "_M_column") == 0)
3650 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3651 else
3652 gcc_unreachable ();
3653 CONSTRUCTOR_APPEND_ELT (v, field, val);
3656 tree ctor = build_constructor (source_location_impl, v);
3657 TREE_CONSTANT (ctor) = 1;
3658 TREE_STATIC (ctor) = 1;
3659 DECL_INITIAL (var) = ctor;
3660 varpool_node::finalize_decl (var);
3661 *entryp = entry;
3662 entryp->var = var;
3665 return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
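/* Note that keying the table on (location, function) above means two
   __builtin_source_location () calls expanded at the same location in
   the same function share one Lsrc_loc variable, while calls in
   different functions get distinct variables, as _M_function_name
   differs.  */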
3668 #include "gt-cp-cp-gimplify.h"