/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
};

using fold_flags_t = int;
struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
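
/* For illustration (a rough sketch; the exact trees depend on context):
   a body guarded by a dynamic exception specification such as

     void f () throw (int) { ... }

   is lowered to approximately

     TRY_CATCH_EXPR
       <body>
       EH_FILTER_EXPR <allowed = int>
	 <failure: call the unexpected handler with the exception pointer>

   so the middle end only ever sees the generic try/filter form.  */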
/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
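
/* For illustration (a sketch; the exact folding depends on the condition
   and on fallthru of the branches):

     if constexpr (true) f (); else g ();	// becomes just:  f ();
     if consteval { f (); } else { g (); }	// keeps only:  g ();
     if (c) f (); else g ();			// becomes:  c ? f () : g ()

   where the last form is the COND_EXPR handed to the gimplifier.  */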
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
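
/* For illustration (a sketch; the precise trees vary, and the slot
   name D.1234 is hypothetical):

     S s = S (1);	// INIT_EXPR <s, TARGET_EXPR <D.1234,
			//	       AGGR_INIT_EXPR <S::S, D.1234, 1>>>

   ends up with the temporary slot replaced by the real target,
   i.e. AGGR_INIT_EXPR <S::S, s, 1>, eliding the copy.  */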
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
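
/* For illustration (a sketch): given

     struct empty { };
     empty a, b;
     a = b;		// no data to copy

   the assignment satisfies simple_empty_class_p, so the gimplifier can
   drop the copy entirely and only evaluate the operands for their
   side-effects (here, none).  */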
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
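
/* For illustration (a sketch):

     volatile int v;
     v = 0;		// naming v as an lvalue: no side-effects
     a[i++] = 0;	// the index has side-effects: true
     *f () = 0;		// the address computation calls f: true  */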
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
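
/* For illustration (a sketch): with C++17 ordered evaluation of an
   overloaded operator such as

     a << i << i++;	// operands evaluated left to right

   an earlier argument like i must be forced into a temporary, or the
   later i++ could modify it before the call consumes it; ORDERED
   requests exactly that temporary.  */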
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	cp_fold_data data (ff_genericize | ff_mce_false);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
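
    /* For illustration (a sketch of the P0145 preevaluation above):

	 a[f ()] = i;	// C++17: i is evaluated before a[f ()]

       so when the LHS has side-effects, the scalar RHS i is saved into
       a temporary in *PRE_P before the LHS is gimplified.  */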

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (*expr_p);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
		      (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
		       &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
		      (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
		       &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;
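
    /* For illustration (a sketch of the C++17 evaluation order above):

	 get_fn () (make_arg ());	// get_fn () must be called first

       so when the callee is a dynamic function pointer expression, it is
       forced into a temporary before any arguments are gimplified.  */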

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
	 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
	 on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
			   || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
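
/* For illustration (a sketch): in

     S s;			// S has a nontrivial copy ctor
     #pragma omp task		// s becomes implicitly firstprivate
       use (s);

   the use of s inside the task makes it implicitly firstprivate, so the
   copy constructor and destructor of S are instantiated here, before
   gimplification would need them.  */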
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
	  && !TARGET_EXPR_ELIDING_P (e.value))
	return true;
    }
  return false;
}

/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && ((flag_exceptions
		&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
	       || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}

/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}
/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

bool
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return false;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
  return true;
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.  */
static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL;
	}
      break;

    /* cp_genericize_{init,target}_expr are only for genericize time; they're
       here rather than in cp_genericize to avoid problems with the invisible
       reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL;
}
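
/* For illustration (a sketch of the nested TARGET_EXPR case above;
   the slot names are hypothetical):

     TARGET_EXPR <D.1, TARGET_EXPR <D.2, init (D.2)>>

   is collapsed so the outer slot is used directly:

     TARGET_EXPR <D.1, init (D.1)>

   with maybe_replace_decl rewriting D.2 to D.1 inside the initializer.  */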
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
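
/* For illustration (a sketch, using the GNU VLA extension):

     void f (int n, void *p)
     {
       int (*a)[n] = (int (*)[n]) p;	// cast to pointer-to-VLA
     }

   the anonymous type int[n] gets an artificial TYPE_DECL and a
   DECL_EXPR here, so gimplify_type_sizes sees its size expression.  */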
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
	if (alias != error_mark_node)
	  {
	    *stmt_p = alias;
	    TREE_USED (alias) |= TREE_USED (stmt);
	  }
	*walk_subtrees = 0;
	return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (UNLIKELY (wtd->omp_ctx != NULL)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case ASSERTION_STMT:
    case PRECONDITION_STMT:
    case POSTCONDITION_STMT:
      {
	if (tree check = build_contract_check (stmt))
	  {
	    *stmt_p = check;
	    return cp_genericize_r (stmt_p, walk_subtrees, data);
	  }

	/* If we didn't build a check, replace it with void_node so we don't
	   leak contracts into GENERIC.  */
	*stmt_p = void_node;
	*walk_subtrees = 0;
      }
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_CONTEXT (using_directive) = current_function_decl;
		DECL_SOURCE_LOCATION (using_directive)
		  = cp_expr_loc_or_input_loc (stmt);

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				     cp_genericize_r, cp_walk_subtrees);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (warning_suppressed_p (stmt /* What warning?  */))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;
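
    /* For illustration (a sketch of the -Wterminate case above):

	 struct S {
	   ~S () { throw 1; }	// C++11: dtor is noexcept by default,
	 };			// so this throw always terminates

       the THROW_EXPR is seen inside the implicit MUST_NOT_THROW_EXPR
       wrapping the destructor body, which triggers the warning.  */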

    case CONVERT_EXPR:
      gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the only
		       case we need to solve is distribute parallel for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);

			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;
1926 tree t = DECL_SAVED_TREE (fndecl);
1927 while (t)
1929 switch (TREE_CODE (t))
1931 case BIND_EXPR:
1932 t = BIND_EXPR_BODY (t);
1933 continue;
1934 case TRY_FINALLY_EXPR:
1935 case CLEANUP_POINT_EXPR:
1936 t = TREE_OPERAND (t, 0);
1937 continue;
1938 case STATEMENT_LIST:
1940 tree_stmt_iterator i = tsi_last (t);
1941 while (!tsi_end_p (i))
1943 tree p = tsi_stmt (i);
1944 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1945 break;
1946 tsi_prev (&i);
1948 if (!tsi_end_p (i))
1950 t = tsi_stmt (i);
1951 continue;
1954 break;
1955 case RETURN_EXPR:
1956 return;
1957 default:
1958 break;
1960 break;
1962 if (t == NULL_TREE)
1963 return;
1964 tree *p = &DECL_SAVED_TREE (fndecl);
1965 if (TREE_CODE (*p) == BIND_EXPR)
1966 p = &BIND_EXPR_BODY (*p);
1968 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1969 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1970 t = ubsan_instrument_return (loc);
1971 else
1972 t = build_builtin_unreachable (BUILTINS_LOCATION);
1974 append_to_statement_list (t, p);
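
/* For illustration (not part of the GCC sources): given a non-void
   function that can fall off the end, such as

     int sign (int v) { if (v > 0) return 1; if (v < 0) return -1; }

   the walk above finds no trailing RETURN_EXPR, so either a ubsan
   runtime check (under -fsanitize=return) or a __builtin_unreachable
   call is appended after the function body.  */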
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (VAR_P (var)
                  && DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn,
                                           i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
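
/* For illustration (not part of the GCC sources): for an array operand
   T a[N], the array branch above emits, in GENERIC pseudo-code,

     p1 = &a[0]; end1 = p1 + sizeof (a);
   lab:
     fn (p1, ...);
     p1 += sizeof (T);
     if (p1 != end1) goto lab;

   i.e. FN is applied in turn to every element of the (possibly
   multidimensional) array.  */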
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
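
/* For illustration (not part of the GCC sources): these hooks cooperate
   on a directive such as

     #pragma omp parallel firstprivate (obj)

   where obj has class type: the private copy is created via
   cxx_omp_clause_copy_ctor and destroyed via cxx_omp_clause_dtor, both
   dispatching through the constructor/destructor trees previously saved
   in CP_OMP_CLAUSE_INFO.  */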
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);
    }

  if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
    {
      /* NVR doesn't preserve const qualification of the
         variable's type.  */
      tree outer = outer_curly_brace_block (current_function_decl);
      tree var;

      if (outer)
        for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
          if (VAR_P (var)
              && DECL_NAME (decl) == DECL_NAME (var)
              && (TYPE_MAIN_VARIANT (type)
                  == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
            {
              if (TYPE_READONLY (TREE_TYPE (var)))
                type = TREE_TYPE (var);
              break;
            }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
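
/* For illustration (not part of the GCC sources):

     const int limit = 100;          // const, no mutable member
     struct S { mutable int cache; };
     const S s{};                    // const, but has a mutable member

   Only the first declaration satisfies cxx_omp_const_qual_no_mutable
   and is therefore predetermined shared.  */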
/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
   of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;

      if (c_omp_predefined_variable (decl))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* The `this' pointer may not be specified in data-sharing clauses;
     still, we need to predetermine it firstprivate.  */
  if (decl == current_class_ptr)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values; these
     are usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
           && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Similarly for typeinfo symbols.  */
  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
enum omp_clause_defaultmap_kind
cxx_omp_predetermined_mapping (tree decl)
{
  /* Predetermine artificial variables holding integral values; these
     are usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
           && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;

  if (c_omp_predefined_variable (decl))
    return OMP_CLAUSE_DEFAULTMAP_TO;

  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
          || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
                                     true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
{
  while (true)
    {
      x = cp_fold (x, flags);
      if (rval)
        x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
          && !TYPE_REF_P (TREE_TYPE (x)))
        {
          tree v = decl_constant_value (x);
          if (v != x && v != error_mark_node)
            {
              x = v;
              continue;
            }
        }
      break;
    }
  return x;
}

tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  return cp_fold_maybe_rvalue (x, rval, ff_none);
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x, fold_flags_t flags)
{
  return cp_fold_maybe_rvalue (x, true, flags);
}

tree
cp_fold_rvalue (tree x)
{
  return cp_fold_rvalue (x, ff_none);
}
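
/* For illustration (not part of the GCC sources): when X is used as an
   rvalue, the loop above lets a constant declaration fold through
   decl_constant_value, e.g.

     const int k = 3;
     int j = k + 1;    // the rvalue use of k folds to 3, then to 4

   iterating until no further replacement is possible.  */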
/* Perform folding on expression X.  */

static tree
cp_fully_fold (tree x, mce_value manifestly_const_eval)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
         a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
        x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
               && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
               && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
        x = TREE_OPERAND (x, 0);
    }
  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;
  return cp_fold_rvalue (x, flags);
}

tree
cp_fully_fold (tree x)
{
  return cp_fully_fold (x, mce_unknown);
}

/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  x = cp_fully_fold (x, mce_false);
  cp_fold_data data (ff_mce_false);
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}

static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];

/* Subroutine of cp_fold.  Returns which fold cache to use according
   to the given flags.  We need multiple caches since the result of
   folding may depend on which flags are used.  */

static hash_map<tree, tree> *&
get_fold_cache (fold_flags_t flags)
{
  if (flags & ff_mce_false)
    return fold_caches[1];
  else
    return fold_caches[0];
}

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  for (auto& fold_cache : fold_caches)
    if (fold_cache != NULL)
      fold_cache->empty ();
}
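
/* For illustration (not part of the GCC sources): two caches are needed
   because the same tree can fold differently depending on the flags, e.g.

     __builtin_is_constant_evaluated ()

   folds to false under ff_mce_false but must stay unfolded otherwise, so
   a result cached under one set of flags must never be served for the
   other.  */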
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x, fold_flags_t flags)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  auto& fold_cache = get_fold_cache (flags);
  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  uid_sensitive_constexpr_evaluation_checker c;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
         effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      if (!TREE_SIDE_EFFECTS (r))
        x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    CASE_CONVERT:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        {
          /* This is just to make sure we don't end up with casts to
             void from error_mark_node.  If we just return x, then
             cp_fold_r might fold the operand into error_mark_node and
             leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
             during gimplification doesn't like such casts.
             Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
             folding of the operand should be in the caches and if in cp_fold_r
             it will modify it in place.  */
          op0 = cp_fold (TREE_OPERAND (x, 0), flags);
          if (op0 == error_mark_node)
            x = error_mark_node;
          break;
        }

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;
    case EXCESS_PRECISION_EXPR:
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
      x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
        {
          tree p = maybe_undo_parenthesized_ref (x);
          if (p != x)
            return cp_fold (p, flags);
        }
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
          && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
        {
          tree val = get_base_address (op0);
          if (val
              && INDIRECT_REF_P (val)
              && COMPLETE_TYPE_P (TREE_TYPE (val))
              && TREE_CONSTANT (TREE_OPERAND (val, 0)))
            {
              val = TREE_OPERAND (val, 0);
              STRIP_NOPS (val);
              val = maybe_constant_value (val);
              if (TREE_CODE (val) == INTEGER_CST)
                return fold_offsetof (op0, TREE_TYPE (x));
            }
        }
      goto finish_unary;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
              if (code == INDIRECT_REF
                  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
                {
                  TREE_READONLY (x) = TREE_READONLY (org_x);
                  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
                  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
                }
            }
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);

      /* decltype(nullptr) has only one value, so optimize away all comparisons
         with that type right away; keeping them in the IL causes trouble for
         various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
          && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
          && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
        {
          switch (code)
            {
            case EQ_EXPR:
              x = constant_boolean_node (true, TREE_TYPE (x));
              break;
            case NE_EXPR:
              x = constant_boolean_node (false, TREE_TYPE (x));
              break;
            default:
              gcc_unreachable ();
            }
          return omit_two_operands_loc (loc, TREE_TYPE (x), x,
                                        op0, op1);
        }

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
         TREE_NO_WARNING (org_x), but to avoid that option affecting code
         generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            {
              if (warn_nonnull_compare
                  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
                suppress_warning (x, OPT_Wnonnull_compare);
            }
          /* Otherwise give up on optimizing these, let GIMPLE folders
             optimize those later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              if (warn_nonnull_compare
                  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
                suppress_warning (x, OPT_Wnonnull_compare);
            }
          else
            x = org_x;
        }

      break;
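
      /* For illustration (not part of the GCC sources): given

           decltype (nullptr) p = nullptr, q = nullptr;
           bool b = p == q;

         the comparison folds to true above, since every value of type
         decltype(nullptr) compares equal; omit_two_operands_loc preserves
         any side effects of the operands while dropping the comparison
         itself.  */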
    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
      op2 = cp_fold (TREE_OPERAND (x, 2), flags);

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
        {
          warning_sentinel s (warn_int_in_bool_context);
          if (!VOID_TYPE_P (TREE_TYPE (op1)))
            op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
          if (!VOID_TYPE_P (TREE_TYPE (op2)))
            op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
        }
      else if (VOID_TYPE_P (TREE_TYPE (x)))
        {
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              /* If the condition is constant, fold can fold away
                 the COND_EXPR.  If some statement-level uses of COND_EXPR
                 have one of the branches NULL, avoid folding crash.  */
              if (!op1)
                op1 = build_empty_stmt (loc);
              if (!op2)
                op2 = build_empty_stmt (loc);
            }
          else
            {
              /* Otherwise, don't bother folding a void condition, since
                 it can't produce a constant value.  */
              if (op0 != TREE_OPERAND (x, 0)
                  || op1 != TREE_OPERAND (x, 1)
                  || op2 != TREE_OPERAND (x, 2))
                x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
              break;
            }
        }

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
          && x != error_mark_node
          && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
        x = fold_convert (TREE_TYPE (org_x), x);

      break;
    case CALL_EXPR:
      {
        tree callee = get_callee_fndecl (x);

        /* "Inline" calls to std::move/forward and other cast-like functions
           by simply folding them into a corresponding cast to their return
           type.  This is cheaper than relying on the middle end to do so, and
           also means we avoid generating useless debug info for them at all.

           At this point the argument has already been converted into a
           reference, so it suffices to use a NOP_EXPR to express the
           cast.  */
        if ((OPTION_SET_P (flag_fold_simple_inlines)
             ? flag_fold_simple_inlines
             : !flag_no_inline)
            && call_expr_nargs (x) == 1
            && decl_in_std_namespace_p (callee)
            && DECL_NAME (callee) != NULL_TREE
            && (id_equal (DECL_NAME (callee), "move")
                || id_equal (DECL_NAME (callee), "forward")
                || id_equal (DECL_NAME (callee), "addressof")
                /* This addressof equivalent is used heavily in libstdc++.  */
                || id_equal (DECL_NAME (callee), "__addressof")
                || id_equal (DECL_NAME (callee), "as_const")))
          {
            r = CALL_EXPR_ARG (x, 0);
            /* Check that the return and argument types are sane before
               folding.  */
            if (INDIRECT_TYPE_P (TREE_TYPE (x))
                && INDIRECT_TYPE_P (TREE_TYPE (r)))
              {
                if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
                  r = build_nop (TREE_TYPE (x), r);
                x = cp_fold (r, flags);
                break;
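
                /* For illustration (not part of the GCC sources): a call
                   such as std::move (obj), whose argument has already been
                   converted to a reference, is replaced here by the bare
                   cast (T&&) obj -- a NOP_EXPR -- so no out-of-line call
                   or debug info is ever generated for it.  */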
              }
          }

        int sv = optimize, nw = sv;

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && fndecl_built_in_p (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
          {
            iloc_sentinel ils (EXPR_LOCATION (x));
            switch (DECL_FE_FUNCTION_CODE (callee))
              {
              case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
                /* Defer folding __builtin_is_constant_evaluated unless
                   we know this isn't a manifestly constant-evaluated
                   context.  */
                if (flags & ff_mce_false)
                  x = boolean_false_node;
                break;
              case CP_BUILT_IN_SOURCE_LOCATION:
                x = fold_builtin_source_location (x);
                break;
              case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
                x = fold_builtin_is_corresponding_member
                      (EXPR_LOCATION (x), call_expr_nargs (x),
                       &CALL_EXPR_ARG (x, 0));
                break;
              case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
                x = fold_builtin_is_pointer_inverconvertible_with_class
                      (EXPR_LOCATION (x), call_expr_nargs (x),
                       &CALL_EXPR_ARG (x, 0));
                break;
              default:
                break;
              }
            break;
          }

        if (callee
            && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
                                  BUILT_IN_FRONTEND))
          {
            x = fold_builtin_source_location (x);
            break;
          }

        bool changed = false;
        int m = call_expr_nargs (x);
        for (int i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i), flags);
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                if (!changed)
                  x = copy_node (x);
                CALL_EXPR_ARG (x, i) = r;
                changed = true;
              }
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r, flags);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          {
            mce_value manifestly_const_eval = mce_unknown;
            if (flags & ff_mce_false)
              /* Allow folding __builtin_is_constant_evaluated to false during
                 constexpr evaluation of this call.  */
              manifestly_const_eval = mce_false;
            r = maybe_constant_value (x, /*decl=*/NULL_TREE,
                                      manifestly_const_eval);
          }
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            if (DECL_CONSTRUCTOR_P (callee))
              {
                loc = EXPR_LOCATION (x);
                tree s = build_fold_indirect_ref_loc (loc,
                                                      CALL_EXPR_ARG (x, 0));
                r = cp_build_init_expr (s, r);
              }
            x = r;
            break;
          }

        break;
      }
    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value, flags);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    vec_free (nelts);
                    break;
                  }
                if (nelts == NULL)
                  nelts = elts->copy ();
                (*nelts)[i].value = op;
              }
          }
        if (nelts)
          {
            x = build_constructor (TREE_TYPE (x), nelts);
            CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
              = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
            CONSTRUCTOR_MUTABLE_POISON (x)
              = CONSTRUCTOR_MUTABLE_POISON (org_x);
          }
        if (VECTOR_TYPE_P (TREE_TYPE (x)))
          x = fold (x);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        int n = TREE_VEC_LENGTH (x);

        for (int i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
            if (op != TREE_VEC_ELT (x, i))
              {
                if (!changed)
                  x = copy_node (x);
                TREE_VEC_ELT (x, i) = op;
                changed = true;
              }
          }
      }

      break;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
      op3 = cp_fold (TREE_OPERAND (x, 3), flags);

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            {
              x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
              TREE_READONLY (x) = TREE_READONLY (org_x);
              TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
              TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
            }
        }

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
         folding, evaluates to an invariant.  In that case no need to wrap
         this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0), flags);
      if (tree_invariant_p (r))
        x = r;
      break;

    case REQUIRES_EXPR:
      x = evaluate_requires_expr (x);
      break;

    default:
      return org_x;
    }

  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      copy_warning (x, org_x);
    }

  if (!c.evaluation_restricted_p ())
    {
      fold_cache->put (org_x, x);
      /* Prevent folding an already folded result again.  */
      if (x != org_x)
        fold_cache->put (x, x);
    }

  return x;
}
/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */

tree
lookup_hotness_attribute (tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree name = get_attribute_name (list);
      if ((is_attribute_p ("hot", name)
           || is_attribute_p ("cold", name)
           || is_attribute_p ("likely", name)
           || is_attribute_p ("unlikely", name))
          && is_attribute_namespace_p ("", list))
        break;
    }
  return list;
}

/* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST.  */

static tree
remove_hotness_attribute (tree list)
{
  for (tree *p = &list; *p; )
    {
      tree l = *p;
      tree name = get_attribute_name (l);
      if ((is_attribute_p ("hot", name)
           || is_attribute_p ("cold", name)
           || is_attribute_p ("likely", name)
           || is_attribute_p ("unlikely", name))
          && is_attribute_namespace_p ("", l))
        {
          *p = TREE_CHAIN (l);
          continue;
        }
      p = &TREE_CHAIN (l);
    }
  return list;
}
/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
   PREDICT_EXPR.  */

tree
process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  if (tree attr = lookup_hotness_attribute (std_attrs))
    {
      tree name = get_attribute_name (attr);
      bool hot = (is_attribute_p ("hot", name)
                  || is_attribute_p ("likely", name));
      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
                                      hot ? TAKEN : NOT_TAKEN);
      SET_EXPR_LOCATION (pred, attrs_loc);
      add_stmt (pred);
      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
        warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
                 get_attribute_name (other), name);
      std_attrs = remove_hotness_attribute (std_attrs);
    }
  return std_attrs;
}
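
/* For illustration (not part of the GCC sources): in

     if (err) [[unlikely]]
       handle_error ();

   the [[unlikely]] on the branch statement is consumed here: a
   PREDICT_EXPR with PRED_COLD_LABEL/NOT_TAKEN is emitted ahead of the
   statement and the attribute itself is removed from the list.  */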
/* Build IFN_ASSUME internal call for assume condition ARG.  */

tree
build_assume_call (location_t loc, tree arg)
{
  if (!processing_template_decl)
    arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
  return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
                                       1, arg);
}
/* If [[assume (cond)]] appears on this statement, handle it.  */

tree
process_stmt_assume_attribute (tree std_attrs, tree statement,
                               location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  tree attr = lookup_attribute ("gnu", "assume", std_attrs);
  if (!attr)
    return std_attrs;
  /* The next token after the assume attribute is not ';'.  */
  if (statement)
    {
      warning_at (attrs_loc, OPT_Wattributes,
                  "%<assume%> attribute not followed by %<;%>");
      attr = NULL_TREE;
    }
  for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
    {
      tree args = TREE_VALUE (attr);
      if (args && PACK_EXPANSION_P (args))
        {
          auto_diagnostic_group d;
          error_at (attrs_loc, "pack expansion of %qE attribute",
                    get_attribute_name (attr));
          if (cxx_dialect >= cxx17)
            inform (attrs_loc, "use fold expression in the attribute "
                               "argument instead");
          continue;
        }
      int nargs = list_length (args);
      if (nargs != 1)
        {
          auto_diagnostic_group d;
          error_at (attrs_loc, "wrong number of arguments specified for "
                               "%qE attribute", get_attribute_name (attr));
          inform (attrs_loc, "expected %i, found %i", 1, nargs);
        }
      else
        {
          tree arg = TREE_VALUE (args);
          if (!type_dependent_expression_p (arg))
            arg = contextual_conv_bool (arg, tf_warning_or_error);
          if (error_operand_p (arg))
            continue;
          finish_expr_stmt (build_assume_call (attrs_loc, arg));
        }
    }
  return remove_attribute ("gnu", "assume", std_attrs);
}
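
/* For illustration (not part of the GCC sources): the attribute

     [[assume (n > 0 && n < 64)]];

   is lowered to the internal call IFN_ASSUME on the condition after its
   contextual conversion to bool; the optimizers may then rely on the
   condition holding without it ever being evaluated at runtime.  */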
/* Return the type std::source_location::__impl after performing
   verification on it.  */

tree
get_source_location_impl_type ()
{
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
        qualified_name_lookup_error (std_node, name, decl, input_location);
      else
        error ("%qD is not a type", decl);
      return error_mark_node;
    }
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
        qualified_name_lookup_error (type, name, decl, input_location);
      else
        error ("%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error ("%qD is not a class type", decl);
      return error_mark_node;
    }

  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
        {
          const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
          if (strcmp (n, "_M_file_name") == 0
              || strcmp (n, "_M_function_name") == 0)
            {
              if (TREE_TYPE (field) != const_string_type_node)
                {
                  error ("%qD does not have %<const char *%> type", field);
                  return error_mark_node;
                }
              cnt++;
              continue;
            }
          else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
            {
              if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
                {
                  error ("%qD does not have integral type", field);
                  return error_mark_node;
                }
              cnt++;
              continue;
            }
        }
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error ("%<std::source_location::__impl%> does not contain only "
             "non-static data members %<_M_file_name%>, "
             "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;
  unsigned uid;
  tree var;
};

/* Traits class for function start hash maps below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
         const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
            && ref.uid == -1U
            && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
            && ref.uid == 0
            && ref.var == NULL_TREE);
  }

  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
                           void *);
    gt_pch_nx (&p, op, cookie);
  }
};
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
static GTY(()) unsigned int source_location_id;

/* Fold the __builtin_source_location () call T.  */

tree
fold_builtin_source_location (const_tree t)
{
  gcc_assert (TREE_CODE (t) == CALL_EXPR);
  /* TREE_TYPE (t) is const std::source_location::__impl*  */
  tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  gcc_assert (CLASS_TYPE_P (source_location_impl)
              && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));

  location_t loc = EXPR_LOCATION (t);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
                                &map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
                        source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
           (field = next_aggregate_field (field)) != NULL_TREE;
           field = DECL_CHAIN (field))
        {
          const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
          tree val = NULL_TREE;
          if (strcmp (n, "_M_file_name") == 0)
            {
              if (const char *fname = LOCATION_FILE (loc))
                {
                  fname = remap_macro_filename (fname);
                  val = build_string_literal (fname);
                }
              else
                val = build_string_literal ("");
            }
          else if (strcmp (n, "_M_function_name") == 0)
            {
              const char *name = "";

              if (current_function_decl)
                name = cxx_printable_name (current_function_decl, 2);

              val = build_string_literal (name);
            }
          else if (strcmp (n, "_M_line") == 0)
            val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
          else if (strcmp (n, "_M_column") == 0)
            val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
          else
            gcc_unreachable ();
          CONSTRUCTOR_APPEND_ELT (v, field, val);
        }

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
}
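
/* For illustration (not part of the GCC sources): each distinct
   (location, function) pair gets one static object, roughly

     static const std::source_location::__impl Lsrc_loc0
       = { "file.cc", "int f()", 42, 7 };

   and the __builtin_source_location () call folds to &Lsrc_loc0;
   repeated calls from the same spot reuse the cached variable.  */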
#include "gt-cp-cp-gimplify.h"