gcc/cp/cp-gimplify.cc
/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
};

using fold_flags_t = int;
struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
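/* For illustration (example source, not from this file), a dynamic
   exception specification such as

     void f () throw (int) { g (); }

   is lowered so that the body runs inside an EH filter that allows only
   'int'; if g () throws anything else, the failure path (here a call
   built from call_unexpected_fn with the exception pointer) runs
   instead.  */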
/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as the then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
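/* Illustrative examples of this lowering (example source, not from this
   file):

     if constexpr (sizeof (int) == 4) f (); else g ();

   becomes just the statement for the branch selected by the constant
   condition, while for

     if consteval { ct (); } else { rt (); }

   the then_ block is discarded entirely (it may contain unfolded
   immediate function calls) and only the else_ branch survives into
   GENERIC.  */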
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
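/* For example (illustrative source, not from this file), a statement like

     x == 1;

   has no side effects and a non-void type, so the check above emits
   "warning: statement with no effect" (-Wunused-value) before
   gimplification would otherwise drop it silently.  */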
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
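/* As an illustrative example (not from this file), for a class type T

     T t = T (1);

   the initializer is an AGGR_INIT_EXPR whose slot is a temporary; the
   code above rewrites that slot to 't' itself, so the constructor builds
   't' directly and no temporary is created or copied.  */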
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
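/* For example (illustrative, not from this file), with

     struct E { };
     E a, b;
     a = b;

   the assignment copies no data, so once this predicate accepts the
   operand, the MODIFY_EXPR handling below can drop the copy entirely.  */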
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
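/* Illustrative cases (not from this file): for 'volatile int v;' the
   lvalue 'v' has TREE_SIDE_EFFECTS set, but merely naming it does
   nothing, so this returns false; for 'a[i++]' the index expression
   itself has side-effects, so this returns true.  */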
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
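/* For example (illustrative, not from this file), when the front end has
   marked a call's arguments as ordered and a later argument has
   side-effects, as in

     f (a, g ());   // g () may modify a

   the earlier argument 'a' is forced into a fresh temporary here, so the
   value passed for it is fixed before g () runs.  */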
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }
  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	cp_fold_data data (ff_genericize | ff_mce_false);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;
    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
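    /* A worked example of the P0145 ordering above (illustrative source,
       not from this file): in C++17 mode (-fstrong-eval-order), given

	 int i = 0;
	 int f ();        // may modify i
	 a[i] = f ();

       the RHS call is sequenced before the LHS, so 'a[i]' must be
       evaluated with the value 'i' has after f () returns; preevaluating
       the RHS into a temporary preserves that ordering through
       gimplification.  */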
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;
    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (*expr_p);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;
    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
	 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
	 on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
			   || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
/* Return true if T is a parameter or return value that is passed or
   returned by invisible reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
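/* For example (illustrative, not from this file), given

     struct S { S (const S&); int i; };
     int f (S s) { return s.i; }

   's' is passed by invisible reference because its copy constructor is
   nontrivial, so DECL_BY_REFERENCE is set and cp_genericize_r rewrites
   each use of 's' as a dereference of the underlying pointer.  */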
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
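/* For example (illustrative, not from this file), in

     S s;                 // S has a user-defined copy ctor and dtor
     #pragma omp task
     s.use ();

   's' is implicitly determined firstprivate in the task, so its copy
   constructor and destructor are instantiated here; by gimplification
   time template instantiation would no longer be possible.  */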
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
	  && !TARGET_EXPR_ELIDING_P (e.value))
	return true;
    }
  return false;
}
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && ((flag_exceptions
		&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
	       || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}
/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

bool
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return false;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
  return true;
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  Since for now most folding is done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the first
	 time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }
  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL;
	}
      break;

    /* cp_genericize_{init,target}_expr are only for genericize time; they're
       here rather than in cp_genericize to avoid problems with the invisible
       reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
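/* For illustration (example source, not from this file), for 'int a, b;'
   the expression 'a <=> b' has type std::strong_ordering, and this lowers
   the SPACESHIP_EXPR to GENERIC comparisons that select among the
   library's less/equal/greater constants.  */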
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
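/* For example (illustrative, not from this file; VLAs are a GNU
   extension in C++):

     void f (int n, void *p)
     {
       int (*a)[n] = (int (*)[n]) p;
     }

   the cast's pointer-to-VLA type has no TYPE_DECL of its own, so a
   DECL_EXPR for an artificial TYPE_DECL is prepended to make
   gimplify_type_sizes process the VLA bound (see c++/88256).  */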
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
	if (alias != error_mark_node)
	  {
	    *stmt_p = alias;
	    TREE_USED (alias) |= TREE_USED (stmt);
	  }
	*walk_subtrees = 0;
	return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (UNLIKELY (wtd->omp_ctx != NULL)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0))
	{
	  if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	    /* Don't dereference an invisiref RESULT_DECL inside a
	       RETURN_EXPR.  */
	    *walk_subtrees = 0;
	  if (RETURN_EXPR_LOCAL_ADDR_P (stmt))
	    {
	      /* Don't return the address of a local variable.  */
	      tree *p = &TREE_OPERAND (stmt, 0);
	      while (TREE_CODE (*p) == COMPOUND_EXPR)
		p = &TREE_OPERAND (*p, 0);
	      if (TREE_CODE (*p) == INIT_EXPR)
		{
		  tree op = TREE_OPERAND (*p, 1);
		  tree new_op = build2 (COMPOUND_EXPR, TREE_TYPE (op), op,
					build_zero_cst (TREE_TYPE (op)));
		  TREE_OPERAND (*p, 1) = new_op;
		}
	    }
	}
      break;
    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;
    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;
    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;
    case ASSERTION_STMT:
    case PRECONDITION_STMT:
    case POSTCONDITION_STMT:
      {
	if (tree check = build_contract_check (stmt))
	  {
	    *stmt_p = check;
	    return cp_genericize_r (stmt_p, walk_subtrees, data);
	  }

	/* If we didn't build a check, replace it with void_node so we don't
	   leak contracts into GENERIC.  */
	*stmt_p = void_node;
	*walk_subtrees = 0;
      }
      break;
    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_CONTEXT (using_directive) = current_function_decl;
		DECL_SOURCE_LOCATION (using_directive)
		  = cp_expr_loc_or_input_loc (stmt);

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;
    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	      /* Fall through.  */
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				     cp_genericize_r, cp_walk_subtrees);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;
    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (warning_suppressed_p (stmt /* What warning?  */))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;
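    /* For illustration (example source, not from this file):

	 struct X { ~X () { throw 1; } };

       in C++11 and later the destructor is implicitly noexcept, so its
       body is wrapped in a MUST_NOT_THROW_EXPR and the code above warns
       that the 'throw' will always call 'terminate' (-Wterminate).  */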
    case CONVERT_EXPR:
      gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;
    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the only
		       case we need to solve is distribute parallel for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);

			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
1918 /* If a function that should end with a return in non-void
1919 function doesn't obviously end with return, add ubsan
1920 instrumentation code to verify it at runtime. If -fsanitize=return
1921 is not enabled, instrument __builtin_unreachable. */
1923 static void
1924 cp_maybe_instrument_return (tree fndecl)
1926 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1927 || DECL_CONSTRUCTOR_P (fndecl)
1928 || DECL_DESTRUCTOR_P (fndecl)
1929 || !targetm.warn_func_return (fndecl))
1930 return;
1932 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1933 /* Don't add __builtin_unreachable () if not optimizing; it will not
1934 improve any optimizations in that case and will just break UB code.
1935 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
1936 UBSan covers this with ubsan_instrument_return above, where sufficient
1937 location information is provided, while the __builtin_unreachable ()
1938 below, with return sanitization disabled, would just result in a
1939 hard-to-understand runtime error without a location. */
1940 && ((!optimize && !flag_unreachable_traps)
1941 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1942 return;
1944 tree t = DECL_SAVED_TREE (fndecl);
1945 while (t)
1947 switch (TREE_CODE (t))
1949 case BIND_EXPR:
1950 t = BIND_EXPR_BODY (t);
1951 continue;
1952 case TRY_FINALLY_EXPR:
1953 case CLEANUP_POINT_EXPR:
1954 t = TREE_OPERAND (t, 0);
1955 continue;
1956 case STATEMENT_LIST:
1958 tree_stmt_iterator i = tsi_last (t);
1959 while (!tsi_end_p (i))
1961 tree p = tsi_stmt (i);
1962 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1963 break;
1964 tsi_prev (&i);
1966 if (!tsi_end_p (i))
1968 t = tsi_stmt (i);
1969 continue;
1972 break;
1973 case RETURN_EXPR:
1974 return;
1975 default:
1976 break;
1978 break;
1980 if (t == NULL_TREE)
1981 return;
1982 tree *p = &DECL_SAVED_TREE (fndecl);
1983 if (TREE_CODE (*p) == BIND_EXPR)
1984 p = &BIND_EXPR_BODY (*p);
1986 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1987 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1988 t = ubsan_instrument_return (loc);
1989 else
1990 t = build_builtin_unreachable (BUILTINS_LOCATION);
1992 append_to_statement_list (t, p);
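/* Illustrative sketch only (not from the sources): given

     int f (int x) { if (x) return 1; }

   the walk above finds no trailing RETURN_EXPR, so with -fsanitize=return
   a UBSan missing-return check is appended to f's body, and otherwise a
   __builtin_unreachable () (or a trap, under -funreachable-traps).  */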
1995 void
1996 cp_genericize (tree fndecl)
1998 tree t;
2000 /* Fix up the types of parms passed by invisible reference. */
2001 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
2002 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
2004 /* If a function's arguments are copied to create a thunk,
2005 then DECL_BY_REFERENCE will be set -- but the type of the
2006 argument will be a pointer type, so we will never get
2007 here. */
2008 gcc_assert (!DECL_BY_REFERENCE (t));
2009 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
2010 TREE_TYPE (t) = DECL_ARG_TYPE (t);
2011 DECL_BY_REFERENCE (t) = 1;
2012 TREE_ADDRESSABLE (t) = 0;
2013 relayout_decl (t);
2016 /* Do the same for the return value. */
2017 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
2019 t = DECL_RESULT (fndecl);
2020 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
2021 DECL_BY_REFERENCE (t) = 1;
2022 TREE_ADDRESSABLE (t) = 0;
2023 relayout_decl (t);
2024 if (DECL_NAME (t))
2026 /* Adjust DECL_VALUE_EXPR of the original var. */
2027 tree outer = outer_curly_brace_block (current_function_decl);
2028 tree var;
2030 if (outer)
2031 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2032 if (VAR_P (var)
2033 && DECL_NAME (t) == DECL_NAME (var)
2034 && DECL_HAS_VALUE_EXPR_P (var)
2035 && DECL_VALUE_EXPR (var) == t)
2037 tree val = convert_from_reference (t);
2038 SET_DECL_VALUE_EXPR (var, val);
2039 break;
2044 /* If we're a clone, the body is already GIMPLE. */
2045 if (DECL_CLONED_FUNCTION_P (fndecl))
2046 return;
2048 /* Allow cp_genericize calls to be nested. */
2049 bc_state_t save_state;
2050 save_bc_state (&save_state);
2052 /* We do want to see every occurrence of the parms, so we can't just use
2053 walk_tree's hash functionality. */
2054 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
2056 cp_maybe_instrument_return (fndecl);
2058 /* Do everything else. */
2059 c_genericize (fndecl);
2060 restore_bc_state (&save_state);
2063 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2064 NULL if there is in fact nothing to do. ARG2 may be null if FN
2065 actually only takes one argument. */
2067 static tree
2068 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
2070 tree defparm, parm, t;
2071 int i = 0;
2072 int nargs;
2073 tree *argarray;
2075 if (fn == NULL)
2076 return NULL;
2078 nargs = list_length (DECL_ARGUMENTS (fn));
2079 argarray = XALLOCAVEC (tree, nargs);
2081 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
2082 if (arg2)
2083 defparm = TREE_CHAIN (defparm);
2085 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
2086 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
2088 tree inner_type = TREE_TYPE (arg1);
2089 tree start1, end1, p1;
2090 tree start2 = NULL, p2 = NULL;
2091 tree ret = NULL, lab;
2093 start1 = arg1;
2094 start2 = arg2;
2095 do
2096 {
2097 inner_type = TREE_TYPE (inner_type);
2098 start1 = build4 (ARRAY_REF, inner_type, start1,
2099 size_zero_node, NULL, NULL);
2100 if (arg2)
2101 start2 = build4 (ARRAY_REF, inner_type, start2,
2102 size_zero_node, NULL, NULL);
2104 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2105 start1 = build_fold_addr_expr_loc (input_location, start1);
2106 if (arg2)
2107 start2 = build_fold_addr_expr_loc (input_location, start2);
2109 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2110 end1 = fold_build_pointer_plus (start1, end1);
2112 p1 = create_tmp_var (TREE_TYPE (start1));
2113 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2114 append_to_statement_list (t, &ret);
2116 if (arg2)
2118 p2 = create_tmp_var (TREE_TYPE (start2));
2119 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2120 append_to_statement_list (t, &ret);
2123 lab = create_artificial_label (input_location);
2124 t = build1 (LABEL_EXPR, void_type_node, lab);
2125 append_to_statement_list (t, &ret);
2127 argarray[i++] = p1;
2128 if (arg2)
2129 argarray[i++] = p2;
2130 /* Handle default arguments. */
2131 for (parm = defparm; parm && parm != void_list_node;
2132 parm = TREE_CHAIN (parm), i++)
2133 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2134 TREE_PURPOSE (parm), fn,
2135 i - is_method, tf_warning_or_error);
2136 t = build_call_a (fn, i, argarray);
2137 t = fold_convert (void_type_node, t);
2138 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2139 append_to_statement_list (t, &ret);
2141 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2142 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2143 append_to_statement_list (t, &ret);
2145 if (arg2)
2147 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2148 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2149 append_to_statement_list (t, &ret);
2152 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2153 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2154 append_to_statement_list (t, &ret);
2156 return ret;
2158 else
2160 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2161 if (arg2)
2162 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2163 /* Handle default arguments. */
2164 for (parm = defparm; parm && parm != void_list_node;
2165 parm = TREE_CHAIN (parm), i++)
2166 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2167 TREE_PURPOSE (parm), fn,
2168 i - is_method, tf_warning_or_error);
2169 t = build_call_a (fn, i, argarray);
2170 t = fold_convert (void_type_node, t);
2171 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
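/* A rough source-level picture of the array loop built above (an
   editor's sketch with hypothetical names, not the GENERIC actually
   produced):

     char *p1 = (char *) &arg1;
     char *end1 = p1 + sizeof (arg1);
     do
       {
	 fn ((T *) p1, (T *) p2);    // plus any converted default args
	 p1 += sizeof (T);
	 if (arg2)
	   p2 += sizeof (T);
       }
     while (p1 != end1);  */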
2175 /* Return code to initialize DECL with its default constructor, or
2176 NULL if there's nothing to do. */
2178 tree
2179 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2181 tree info = CP_OMP_CLAUSE_INFO (clause);
2182 tree ret = NULL;
2184 if (info)
2185 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2187 return ret;
2190 /* Return code to initialize DST with a copy constructor from SRC. */
2192 tree
2193 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2195 tree info = CP_OMP_CLAUSE_INFO (clause);
2196 tree ret = NULL;
2198 if (info)
2199 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2200 if (ret == NULL)
2201 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2203 return ret;
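/* Illustrative note: e.g. for firstprivate (s) where s has class type,
   the info vector carries the copy constructor, so the result calls it;
   for clauses without CP_OMP_CLAUSE_INFO this degrades to the plain
   assignment dst = src built above.  */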
2206 /* Similarly, except use an assignment operator instead. */
2208 tree
2209 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2211 tree info = CP_OMP_CLAUSE_INFO (clause);
2212 tree ret = NULL;
2214 if (info)
2215 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2216 if (ret == NULL)
2217 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2219 return ret;
2222 /* Return code to destroy DECL. */
2224 tree
2225 cxx_omp_clause_dtor (tree clause, tree decl)
2227 tree info = CP_OMP_CLAUSE_INFO (clause);
2228 tree ret = NULL;
2230 if (info)
2231 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2233 return ret;
2236 /* True if OpenMP should privatize what this DECL points to rather
2237 than the DECL itself. */
2239 bool
2240 cxx_omp_privatize_by_reference (const_tree decl)
2242 return (TYPE_REF_P (TREE_TYPE (decl))
2243 || is_invisiref_parm (decl));
2246 /* Return true if DECL is a const-qualified variable that has no mutable member. */
2247 bool
2248 cxx_omp_const_qual_no_mutable (tree decl)
2250 tree type = TREE_TYPE (decl);
2251 if (TYPE_REF_P (type))
2253 if (!is_invisiref_parm (decl))
2254 return false;
2255 type = TREE_TYPE (type);
2257 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2259 /* NVR (the named return value optimization) doesn't preserve const
2260 qualification of the variable's type. */
2261 tree outer = outer_curly_brace_block (current_function_decl);
2262 tree var;
2264 if (outer)
2265 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2266 if (VAR_P (var)
2267 && DECL_NAME (decl) == DECL_NAME (var)
2268 && (TYPE_MAIN_VARIANT (type)
2269 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2271 if (TYPE_READONLY (TREE_TYPE (var)))
2272 type = TREE_TYPE (var);
2273 break;
2278 if (type == error_mark_node)
2279 return false;
2281 /* Variables with const-qualified type having no mutable member
2282 are predetermined shared. */
2283 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2284 return true;
2286 return false;
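/* Sketch of the intent:

     const int n = 42;              // qualifies: const, no mutable member
     struct S { mutable int m; };
     const S s = {};                // does not qualify: mutable member m  */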
2289 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2290 of DECL is predetermined. */
2292 enum omp_clause_default_kind
2293 cxx_omp_predetermined_sharing_1 (tree decl)
2295 /* Static data members are predetermined shared. */
2296 if (TREE_STATIC (decl))
2298 tree ctx = CP_DECL_CONTEXT (decl);
2299 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2300 return OMP_CLAUSE_DEFAULT_SHARED;
2302 if (c_omp_predefined_variable (decl))
2303 return OMP_CLAUSE_DEFAULT_SHARED;
2306 /* The this pointer may not be specified in data-sharing clauses, yet
2307 we still need to predetermine it firstprivate. */
2308 if (decl == current_class_ptr)
2309 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2311 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2314 /* Likewise, but also include the artificial vars. We don't want to
2315 disallow the artificial vars being mentioned in explicit clauses,
2316 as we use artificial vars e.g. for loop constructs with random
2317 access iterators other than pointers, but during gimplification
2318 we want to treat them as predetermined. */
2320 enum omp_clause_default_kind
2321 cxx_omp_predetermined_sharing (tree decl)
2323 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2324 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2325 return ret;
2327 /* Predetermine artificial variables holding integral values; those
2328 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2329 gimplification. */
2330 if (VAR_P (decl)
2331 && DECL_ARTIFICIAL (decl)
2332 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2333 && !(DECL_LANG_SPECIFIC (decl)
2334 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2335 return OMP_CLAUSE_DEFAULT_SHARED;
2337 /* Similarly for typeinfo symbols. */
2338 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2339 return OMP_CLAUSE_DEFAULT_SHARED;
2341 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2344 enum omp_clause_defaultmap_kind
2345 cxx_omp_predetermined_mapping (tree decl)
2347 /* Predetermine artificial variables holding integral values; those
2348 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2349 gimplification. */
2350 if (VAR_P (decl)
2351 && DECL_ARTIFICIAL (decl)
2352 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2353 && !(DECL_LANG_SPECIFIC (decl)
2354 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2355 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2357 if (c_omp_predefined_variable (decl))
2358 return OMP_CLAUSE_DEFAULTMAP_TO;
2360 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2363 /* Finalize an implicitly determined clause. */
2365 void
2366 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2368 tree decl, inner_type;
2369 bool make_shared = false;
2371 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2372 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2373 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2374 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2375 return;
2377 decl = OMP_CLAUSE_DECL (c);
2378 decl = require_complete_type (decl);
2379 inner_type = TREE_TYPE (decl);
2380 if (decl == error_mark_node)
2381 make_shared = true;
2382 else if (TYPE_REF_P (TREE_TYPE (decl)))
2383 inner_type = TREE_TYPE (inner_type);
2385 /* We're interested in the base element, not arrays. */
2386 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2387 inner_type = TREE_TYPE (inner_type);
2389 /* Check for special function availability by building a call to one.
2390 Save the results, because later we won't be in the right context
2391 for making these queries. */
2392 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2393 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2394 if (!make_shared
2395 && CLASS_TYPE_P (inner_type)
2396 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2397 true))
2398 make_shared = true;
2400 if (make_shared)
2402 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2403 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2404 OMP_CLAUSE_SHARED_READONLY (c) = 0;
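/* Illustrative note: e.g. for an implicit lastprivate (it) on a
   class-type loop iterator, the cxx_omp_create_clause_info call above
   checks that the needed default/copy constructor, assignment operator
   and destructor are usable; if not, the clause is demoted to shared.  */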
2408 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2409 disregarded in OpenMP construct, because it is going to be
2410 remapped during OpenMP lowering. SHARED is true if DECL
2411 is going to be shared, false if it is going to be privatized. */
2413 bool
2414 cxx_omp_disregard_value_expr (tree decl, bool shared)
2416 if (shared)
2417 return false;
2418 if (VAR_P (decl)
2419 && DECL_HAS_VALUE_EXPR_P (decl)
2420 && DECL_ARTIFICIAL (decl)
2421 && DECL_LANG_SPECIFIC (decl)
2422 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2423 return true;
2424 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2425 return true;
2426 return false;
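/* E.g. a lambda capture proxy (sketch):

     int x = 0;
     auto l = [=] { return x; };    // the proxy for x has a
				    // DECL_VALUE_EXPR into the closure

   When such a proxy is privatized in an OpenMP region, the hook above
   returns true so lowering remaps the proxy itself rather than the
   closure field.  */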
2429 /* Fold expression X which is used as an rvalue if RVAL is true. */
2431 static tree
2432 cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
2434 while (true)
2436 x = cp_fold (x, flags);
2437 if (rval)
2438 x = mark_rvalue_use (x);
2439 if (rval && DECL_P (x)
2440 && !TYPE_REF_P (TREE_TYPE (x)))
2442 tree v = decl_constant_value (x);
2443 if (v != x && v != error_mark_node)
2445 x = v;
2446 continue;
2449 break;
2451 return x;
2454 tree
2455 cp_fold_maybe_rvalue (tree x, bool rval)
2457 return cp_fold_maybe_rvalue (x, rval, ff_none);
2460 /* Fold expression X which is used as an rvalue. */
2462 static tree
2463 cp_fold_rvalue (tree x, fold_flags_t flags)
2465 return cp_fold_maybe_rvalue (x, true, flags);
2468 tree
2469 cp_fold_rvalue (tree x)
2471 return cp_fold_rvalue (x, ff_none);
2474 /* Perform folding on expression X. */
2476 static tree
2477 cp_fully_fold (tree x, mce_value manifestly_const_eval)
2479 if (processing_template_decl)
2480 return x;
2481 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2482 have to call both. */
2483 if (cxx_dialect >= cxx11)
2485 x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
2486 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2487 a TARGET_EXPR; undo that here. */
2488 if (TREE_CODE (x) == TARGET_EXPR)
2489 x = TARGET_EXPR_INITIAL (x);
2490 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2491 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2492 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2493 x = TREE_OPERAND (x, 0);
2495 fold_flags_t flags = ff_none;
2496 if (manifestly_const_eval == mce_false)
2497 flags |= ff_mce_false;
2498 return cp_fold_rvalue (x, flags);
2501 tree
2502 cp_fully_fold (tree x)
2504 return cp_fully_fold (x, mce_unknown);
2507 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2508 in some cases. */
2510 tree
2511 cp_fully_fold_init (tree x)
2513 if (processing_template_decl)
2514 return x;
2515 x = cp_fully_fold (x, mce_false);
2516 cp_fold_data data (ff_mce_false);
2517 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2518 return x;
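/* E.g. for an initializer such as

     int a[2] = { 1 + 1, 2 * 3 };

   the cp_walk_tree above folds each element recursively, assuming a
   non-manifestly-constant-evaluated context (ff_mce_false).  */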
2521 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2522 and certain changes are made to the folding done. Or should be (FIXME). We
2523 never touch maybe_const, as it is only used for the C front-end
2524 C_MAYBE_CONST_EXPR. */
2526 tree
2527 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2529 return cp_fold_maybe_rvalue (x, !lval);
2532 static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
2534 /* Subroutine of cp_fold. Returns which fold cache to use according
2535 to the given flags. We need multiple caches since the result of
2536 folding may depend on which flags are used. */
2538 static hash_map<tree, tree> *&
2539 get_fold_cache (fold_flags_t flags)
2541 if (flags & ff_mce_false)
2542 return fold_caches[1];
2543 else
2544 return fold_caches[0];
2547 /* Dispose of the whole FOLD_CACHE. */
2549 void
2550 clear_fold_cache (void)
2552 for (auto& fold_cache : fold_caches)
2553 if (fold_cache != NULL)
2554 fold_cache->empty ();
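/* Usage note (sketch): get_fold_cache (ff_none) and
   get_fold_cache (ff_mce_false) return distinct caches, so a tree folded
   under the assumption that we are definitely not in a manifestly
   constant-evaluated context is never handed back in the unknown
   context, and vice versa.  */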
2557 /* This function tries to fold an expression X.
2558 To avoid combinatorial explosion, folding results are kept in fold_cache.
2559 If X is invalid, we don't fold at all.
2560 For performance reasons we don't cache expressions representing a
2561 declaration or constant.
2562 The function returns X or its folded variant. */
2564 static tree
2565 cp_fold (tree x, fold_flags_t flags)
2567 tree op0, op1, op2, op3;
2568 tree org_x = x, r = NULL_TREE;
2569 enum tree_code code;
2570 location_t loc;
2571 bool rval_ops = true;
2573 if (!x || x == error_mark_node)
2574 return x;
2576 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2577 return x;
2579 /* Don't bother to cache DECLs or constants. */
2580 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2581 return x;
2583 auto& fold_cache = get_fold_cache (flags);
2584 if (fold_cache == NULL)
2585 fold_cache = hash_map<tree, tree>::create_ggc (101);
2587 if (tree *cached = fold_cache->get (x))
2588 return *cached;
2590 uid_sensitive_constexpr_evaluation_checker c;
2592 code = TREE_CODE (x);
2593 switch (code)
2595 case CLEANUP_POINT_EXPR:
2596 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2597 effects. */
2598 r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2599 if (!TREE_SIDE_EFFECTS (r))
2600 x = r;
2601 break;
2603 case SIZEOF_EXPR:
2604 x = fold_sizeof_expr (x);
2605 break;
2607 case VIEW_CONVERT_EXPR:
2608 rval_ops = false;
2609 /* FALLTHRU */
2610 case NON_LVALUE_EXPR:
2611 CASE_CONVERT:
2613 if (VOID_TYPE_P (TREE_TYPE (x)))
2615 /* This is just to make sure we don't end up with casts to
2616 void from error_mark_node. If we just return x, then
2617 cp_fold_r might fold the operand into error_mark_node and
2618 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2619 during gimplification doesn't like such casts.
2620 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2621 folding of the operand should be in the caches, and if called from
2622 cp_fold_r it will modify it in place. */
2623 op0 = cp_fold (TREE_OPERAND (x, 0), flags);
2624 if (op0 == error_mark_node)
2625 x = error_mark_node;
2626 break;
2629 loc = EXPR_LOCATION (x);
2630 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2632 if (code == CONVERT_EXPR
2633 && SCALAR_TYPE_P (TREE_TYPE (x))
2634 && op0 != void_node)
2635 /* During parsing we used convert_to_*_nofold; re-convert now using the
2636 folding variants, since fold() doesn't do those transformations. */
2637 x = fold (convert (TREE_TYPE (x), op0));
2638 else if (op0 != TREE_OPERAND (x, 0))
2640 if (op0 == error_mark_node)
2641 x = error_mark_node;
2642 else
2643 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2645 else
2646 x = fold (x);
2648 /* Conversion of an out-of-range value has implementation-defined
2649 behavior; the language considers it different from arithmetic
2650 overflow, which is undefined. */
2651 if (TREE_CODE (op0) == INTEGER_CST
2652 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2653 TREE_OVERFLOW (x) = false;
2655 break;
2657 case EXCESS_PRECISION_EXPR:
2658 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2659 x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
2660 break;
2662 case INDIRECT_REF:
2663 /* We don't need the decltype(auto) obfuscation anymore. */
2664 if (REF_PARENTHESIZED_P (x))
2666 tree p = maybe_undo_parenthesized_ref (x);
2667 if (p != x)
2668 return cp_fold (p, flags);
2670 goto unary;
2672 case ADDR_EXPR:
2673 loc = EXPR_LOCATION (x);
2674 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
2676 /* Cope with user tricks that amount to offsetof. */
2677 if (op0 != error_mark_node
2678 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2680 tree val = get_base_address (op0);
2681 if (val
2682 && INDIRECT_REF_P (val)
2683 && COMPLETE_TYPE_P (TREE_TYPE (val))
2684 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2686 val = TREE_OPERAND (val, 0);
2687 STRIP_NOPS (val);
2688 val = maybe_constant_value (val);
2689 if (TREE_CODE (val) == INTEGER_CST)
2690 return fold_offsetof (op0, TREE_TYPE (x));
2693 goto finish_unary;
2695 case REALPART_EXPR:
2696 case IMAGPART_EXPR:
2697 rval_ops = false;
2698 /* FALLTHRU */
2699 case CONJ_EXPR:
2700 case FIX_TRUNC_EXPR:
2701 case FLOAT_EXPR:
2702 case NEGATE_EXPR:
2703 case ABS_EXPR:
2704 case ABSU_EXPR:
2705 case BIT_NOT_EXPR:
2706 case TRUTH_NOT_EXPR:
2707 case FIXED_CONVERT_EXPR:
2708 unary:
2710 loc = EXPR_LOCATION (x);
2711 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2713 finish_unary:
2714 if (op0 != TREE_OPERAND (x, 0))
2716 if (op0 == error_mark_node)
2717 x = error_mark_node;
2718 else
2720 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2721 if (code == INDIRECT_REF
2722 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2724 TREE_READONLY (x) = TREE_READONLY (org_x);
2725 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2726 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2730 else
2731 x = fold (x);
2733 gcc_assert (TREE_CODE (x) != COND_EXPR
2734 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2735 break;
2737 case UNARY_PLUS_EXPR:
2738 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2739 if (op0 == error_mark_node)
2740 x = error_mark_node;
2741 else
2742 x = fold_convert (TREE_TYPE (x), op0);
2743 break;
2745 case POSTDECREMENT_EXPR:
2746 case POSTINCREMENT_EXPR:
2747 case INIT_EXPR:
2748 case PREDECREMENT_EXPR:
2749 case PREINCREMENT_EXPR:
2750 case COMPOUND_EXPR:
2751 case MODIFY_EXPR:
2752 rval_ops = false;
2753 /* FALLTHRU */
2754 case POINTER_PLUS_EXPR:
2755 case PLUS_EXPR:
2756 case POINTER_DIFF_EXPR:
2757 case MINUS_EXPR:
2758 case MULT_EXPR:
2759 case TRUNC_DIV_EXPR:
2760 case CEIL_DIV_EXPR:
2761 case FLOOR_DIV_EXPR:
2762 case ROUND_DIV_EXPR:
2763 case TRUNC_MOD_EXPR:
2764 case CEIL_MOD_EXPR:
2765 case ROUND_MOD_EXPR:
2766 case RDIV_EXPR:
2767 case EXACT_DIV_EXPR:
2768 case MIN_EXPR:
2769 case MAX_EXPR:
2770 case LSHIFT_EXPR:
2771 case RSHIFT_EXPR:
2772 case LROTATE_EXPR:
2773 case RROTATE_EXPR:
2774 case BIT_AND_EXPR:
2775 case BIT_IOR_EXPR:
2776 case BIT_XOR_EXPR:
2777 case TRUTH_AND_EXPR:
2778 case TRUTH_ANDIF_EXPR:
2779 case TRUTH_OR_EXPR:
2780 case TRUTH_ORIF_EXPR:
2781 case TRUTH_XOR_EXPR:
2782 case LT_EXPR: case LE_EXPR:
2783 case GT_EXPR: case GE_EXPR:
2784 case EQ_EXPR: case NE_EXPR:
2785 case UNORDERED_EXPR: case ORDERED_EXPR:
2786 case UNLT_EXPR: case UNLE_EXPR:
2787 case UNGT_EXPR: case UNGE_EXPR:
2788 case UNEQ_EXPR: case LTGT_EXPR:
2789 case RANGE_EXPR: case COMPLEX_EXPR:
2791 loc = EXPR_LOCATION (x);
2792 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2793 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
2795 /* decltype(nullptr) has only one value, so optimize away all comparisons
2796 with that type right away; keeping them in the IL causes trouble for
2797 various optimizations. */
2798 if (COMPARISON_CLASS_P (org_x)
2799 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2800 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2802 switch (code)
2804 case EQ_EXPR:
2805 x = constant_boolean_node (true, TREE_TYPE (x));
2806 break;
2807 case NE_EXPR:
2808 x = constant_boolean_node (false, TREE_TYPE (x));
2809 break;
2810 default:
2811 gcc_unreachable ();
2813 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2814 op0, op1);
2817 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2819 if (op0 == error_mark_node || op1 == error_mark_node)
2820 x = error_mark_node;
2821 else
2822 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2824 else
2825 x = fold (x);
2827 /* This is only needed for -Wnonnull-compare and only if
2828 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2829 generation, we do it always. */
2830 if (COMPARISON_CLASS_P (org_x))
2832 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2834 else if (COMPARISON_CLASS_P (x))
2836 if (warn_nonnull_compare
2837 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2838 suppress_warning (x, OPT_Wnonnull_compare);
2840 /* Otherwise give up on optimizing these, let GIMPLE folders
2841 optimize those later on. */
2842 else if (op0 != TREE_OPERAND (org_x, 0)
2843 || op1 != TREE_OPERAND (org_x, 1))
2845 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2846 if (warn_nonnull_compare
2847 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2848 suppress_warning (x, OPT_Wnonnull_compare);
2850 else
2851 x = org_x;
2854 break;
2856 case VEC_COND_EXPR:
2857 case COND_EXPR:
2858 loc = EXPR_LOCATION (x);
2859 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2860 op1 = cp_fold (TREE_OPERAND (x, 1), flags);
2861 op2 = cp_fold (TREE_OPERAND (x, 2), flags);
2863 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2865 warning_sentinel s (warn_int_in_bool_context);
2866 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2867 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2868 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2869 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2871 else if (VOID_TYPE_P (TREE_TYPE (x)))
2873 if (TREE_CODE (op0) == INTEGER_CST)
2875 /* If the condition is constant, fold can fold away
2876 the COND_EXPR. If some statement-level uses of COND_EXPR
2877 have one of the branches NULL, avoid a folding crash. */
2878 if (!op1)
2879 op1 = build_empty_stmt (loc);
2880 if (!op2)
2881 op2 = build_empty_stmt (loc);
2883 else
2885 /* Otherwise, don't bother folding a void condition, since
2886 it can't produce a constant value. */
2887 if (op0 != TREE_OPERAND (x, 0)
2888 || op1 != TREE_OPERAND (x, 1)
2889 || op2 != TREE_OPERAND (x, 2))
2890 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2891 break;
2895 if (op0 != TREE_OPERAND (x, 0)
2896 || op1 != TREE_OPERAND (x, 1)
2897 || op2 != TREE_OPERAND (x, 2))
2899 if (op0 == error_mark_node
2900 || op1 == error_mark_node
2901 || op2 == error_mark_node)
2902 x = error_mark_node;
2903 else
2904 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2906 else
2907 x = fold (x);
2909 /* A COND_EXPR might have incompatible types in branches if one or both
2910 arms are bitfields. If folding exposed such a branch, fix it up. */
2911 if (TREE_CODE (x) != code
2912 && x != error_mark_node
2913 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2914 x = fold_convert (TREE_TYPE (org_x), x);
2916 break;
2918 case CALL_EXPR:
2920 tree callee = get_callee_fndecl (x);
2922 /* "Inline" calls to std::move/forward and other cast-like functions
2923 by simply folding them into a corresponding cast to their return
2924 type. This is cheaper than relying on the middle end to do so, and
2925 also means we avoid generating useless debug info for them at all.
2927 At this point the argument has already been converted into a
2928 reference, so it suffices to use a NOP_EXPR to express the
2929 cast. */
2930 if ((OPTION_SET_P (flag_fold_simple_inlines)
2931 ? flag_fold_simple_inlines
2932 : !flag_no_inline)
2933 && call_expr_nargs (x) == 1
2934 && decl_in_std_namespace_p (callee)
2935 && DECL_NAME (callee) != NULL_TREE
2936 && (id_equal (DECL_NAME (callee), "move")
2937 || id_equal (DECL_NAME (callee), "forward")
2938 || id_equal (DECL_NAME (callee), "addressof")
2939 /* This addressof equivalent is used heavily in libstdc++. */
2940 || id_equal (DECL_NAME (callee), "__addressof")
2941 || id_equal (DECL_NAME (callee), "as_const")))
2943 r = CALL_EXPR_ARG (x, 0);
2944 /* Check that the return and argument types are sane before
2945 folding. */
2946 if (INDIRECT_TYPE_P (TREE_TYPE (x))
2947 && INDIRECT_TYPE_P (TREE_TYPE (r)))
2949 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2950 r = build_nop (TREE_TYPE (x), r);
2951 x = cp_fold (r, flags);
2952 break;
2956 int sv = optimize, nw = sv;
2958 /* Some built-in function calls will be evaluated at compile-time in
2959 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2960 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2961 if (callee && fndecl_built_in_p (callee) && !optimize
2962 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2963 && current_function_decl
2964 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2965 nw = 1;
2967 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2969 iloc_sentinel ils (EXPR_LOCATION (x));
2970 switch (DECL_FE_FUNCTION_CODE (callee))
2972 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2973 /* Defer folding __builtin_is_constant_evaluated unless
2974 we know this isn't a manifestly constant-evaluated
2975 context. */
2976 if (flags & ff_mce_false)
2977 x = boolean_false_node;
2978 break;
2979 case CP_BUILT_IN_SOURCE_LOCATION:
2980 x = fold_builtin_source_location (x);
2981 break;
2982 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2983 x = fold_builtin_is_corresponding_member
2984 (EXPR_LOCATION (x), call_expr_nargs (x),
2985 &CALL_EXPR_ARG (x, 0));
2986 break;
2987 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2988 x = fold_builtin_is_pointer_inverconvertible_with_class
2989 (EXPR_LOCATION (x), call_expr_nargs (x),
2990 &CALL_EXPR_ARG (x, 0));
2991 break;
2992 default:
2993 break;
2995 break;
2998 if (callee
2999 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
3000 BUILT_IN_FRONTEND))
3002 x = fold_builtin_source_location (x);
3003 break;
3006 bool changed = false;
3007 int m = call_expr_nargs (x);
3008 for (int i = 0; i < m; i++)
3010 r = cp_fold (CALL_EXPR_ARG (x, i), flags);
3011 if (r != CALL_EXPR_ARG (x, i))
3013 if (r == error_mark_node)
3015 x = error_mark_node;
3016 break;
3018 if (!changed)
3019 x = copy_node (x);
3020 CALL_EXPR_ARG (x, i) = r;
3021 changed = true;
3024 if (x == error_mark_node)
3025 break;
3027 optimize = nw;
3028 r = fold (x);
3029 optimize = sv;
3031 if (TREE_CODE (r) != CALL_EXPR)
3033 x = cp_fold (r, flags);
3034 break;
3037 optimize = nw;
3039 /* Invoke maybe_constant_value for functions declared
3040 constexpr and not called with AGGR_INIT_EXPRs.
3041 TODO:
3042 Do constexpr expansion of expressions where the call itself is not
3043 constant, but the call followed by an INDIRECT_REF is. */
3044 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3045 && !flag_no_inline)
3047 mce_value manifestly_const_eval = mce_unknown;
3048 if (flags & ff_mce_false)
3049 /* Allow folding __builtin_is_constant_evaluated to false during
3050 constexpr evaluation of this call. */
3051 manifestly_const_eval = mce_false;
3052 r = maybe_constant_value (x, /*decl=*/NULL_TREE,
3053 manifestly_const_eval);
3055 optimize = sv;
3057 if (TREE_CODE (r) != CALL_EXPR)
3059 if (DECL_CONSTRUCTOR_P (callee))
3061 loc = EXPR_LOCATION (x);
3062 tree s = build_fold_indirect_ref_loc (loc,
3063 CALL_EXPR_ARG (x, 0));
3064 r = cp_build_init_expr (s, r);
3066 x = r;
3067 break;
3070 break;
3073 case CONSTRUCTOR:
3075 unsigned i;
3076 constructor_elt *p;
3077 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3078 vec<constructor_elt, va_gc> *nelts = NULL;
3079 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3081 tree op = cp_fold (p->value, flags);
3082 if (op != p->value)
3084 if (op == error_mark_node)
3086 x = error_mark_node;
3087 vec_free (nelts);
3088 break;
3090 if (nelts == NULL)
3091 nelts = elts->copy ();
3092 (*nelts)[i].value = op;
3095 if (nelts)
3097 x = build_constructor (TREE_TYPE (x), nelts);
3098 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3099 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3100 CONSTRUCTOR_MUTABLE_POISON (x)
3101 = CONSTRUCTOR_MUTABLE_POISON (org_x);
3103 if (VECTOR_TYPE_P (TREE_TYPE (x)))
3104 x = fold (x);
3105 break;
3107 case TREE_VEC:
3109 bool changed = false;
3110 int n = TREE_VEC_LENGTH (x);
3112 for (int i = 0; i < n; i++)
3114 tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
3115 if (op != TREE_VEC_ELT (x, i))
3117 if (!changed)
3118 x = copy_node (x);
3119 TREE_VEC_ELT (x, i) = op;
3120 changed = true;
3125 break;
3127 case ARRAY_REF:
3128 case ARRAY_RANGE_REF:
3130 loc = EXPR_LOCATION (x);
3131 op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3132 op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3133 op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3134 op3 = cp_fold (TREE_OPERAND (x, 3), flags);
3136 if (op0 != TREE_OPERAND (x, 0)
3137 || op1 != TREE_OPERAND (x, 1)
3138 || op2 != TREE_OPERAND (x, 2)
3139 || op3 != TREE_OPERAND (x, 3))
3141 if (op0 == error_mark_node
3142 || op1 == error_mark_node
3143 || op2 == error_mark_node
3144 || op3 == error_mark_node)
3145 x = error_mark_node;
3146 else
3148 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3149 TREE_READONLY (x) = TREE_READONLY (org_x);
3150 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3151 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3155 x = fold (x);
3156 break;
3158 case SAVE_EXPR:
3159 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3160 folding, evaluates to an invariant. In that case there is no need
3161 to wrap the folded tree in a SAVE_EXPR. */
3162 r = cp_fold (TREE_OPERAND (x, 0), flags);
3163 if (tree_invariant_p (r))
3164 x = r;
3165 break;
3167 case REQUIRES_EXPR:
3168 x = evaluate_requires_expr (x);
3169 break;
3171 default:
3172 return org_x;
3175 if (EXPR_P (x) && TREE_CODE (x) == code)
3177 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3178 copy_warning (x, org_x);
3181 if (!c.evaluation_restricted_p ())
3183 fold_cache->put (org_x, x);
3184 /* Prevent us from trying to fold an already folded result again. */
3185 if (x != org_x)
3186 fold_cache->put (x, x);
3189 return x;
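/* For example (illustrative): cp_fold turns a call std::move (x) into a
   mere cast of x to its rvalue-reference return type, folds 1 + 2 into 3
   via the generic fold () routines, and caches each result keyed on the
   original tree.  */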
3192 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3194 tree
3195 lookup_hotness_attribute (tree list)
3197 for (; list; list = TREE_CHAIN (list))
3199 tree name = get_attribute_name (list);
3200 if ((is_attribute_p ("hot", name)
3201 || is_attribute_p ("cold", name)
3202 || is_attribute_p ("likely", name)
3203 || is_attribute_p ("unlikely", name))
3204 && is_attribute_namespace_p ("", list))
3205 break;
3207 return list;
3210 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3212 static tree
3213 remove_hotness_attribute (tree list)
3215 for (tree *p = &list; *p; )
3217 tree l = *p;
3218 tree name = get_attribute_name (l);
3219 if ((is_attribute_p ("hot", name)
3220 || is_attribute_p ("cold", name)
3221 || is_attribute_p ("likely", name)
3222 || is_attribute_p ("unlikely", name))
3223 && is_attribute_namespace_p ("", l))
3225 *p = TREE_CHAIN (l);
3226 continue;
3228 p = &TREE_CHAIN (l);
3230 return list;
3233 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3234 PREDICT_EXPR. */
3236 tree
3237 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3239 if (std_attrs == error_mark_node)
3240 return std_attrs;
3241 if (tree attr = lookup_hotness_attribute (std_attrs))
3243 tree name = get_attribute_name (attr);
3244 bool hot = (is_attribute_p ("hot", name)
3245 || is_attribute_p ("likely", name));
3246 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3247 hot ? TAKEN : NOT_TAKEN);
3248 SET_EXPR_LOCATION (pred, attrs_loc);
3249 add_stmt (pred);
3250 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3251 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3252 get_attribute_name (other), name);
3253 std_attrs = remove_hotness_attribute (std_attrs);
3255 return std_attrs;
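/* E.g. (sketch):

     if (p) [[likely]]
       f ();

   yields a PREDICT_EXPR (PRED_HOT_LABEL, TAKEN) emitted ahead of the
   then-branch.  */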
3258 /* Build IFN_ASSUME internal call for assume condition ARG. */
3260 tree
3261 build_assume_call (location_t loc, tree arg)
3263 if (!processing_template_decl)
3264 arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
3265 return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
3266 1, arg);
3269 /* If [[assume (cond)]] appears on this statement, handle it. */
3271 tree
3272 process_stmt_assume_attribute (tree std_attrs, tree statement,
3273 location_t attrs_loc)
3275 if (std_attrs == error_mark_node)
3276 return std_attrs;
3277 tree attr = lookup_attribute ("gnu", "assume", std_attrs);
3278 if (!attr)
3279 return std_attrs;
3280 /* The next token after the assume attribute is not ';'. */
3281 if (statement)
3283 warning_at (attrs_loc, OPT_Wattributes,
3284 "%<assume%> attribute not followed by %<;%>");
3285 attr = NULL_TREE;
3287 for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
3289 tree args = TREE_VALUE (attr);
3290 if (args && PACK_EXPANSION_P (args))
3292 auto_diagnostic_group d;
3293 error_at (attrs_loc, "pack expansion of %qE attribute",
3294 get_attribute_name (attr));
3295 if (cxx_dialect >= cxx17)
3296 inform (attrs_loc, "use fold expression in the attribute "
3297 "argument instead");
3298 continue;
3300 int nargs = list_length (args);
3301 if (nargs != 1)
3303 auto_diagnostic_group d;
3304 error_at (attrs_loc, "wrong number of arguments specified for "
3305 "%qE attribute", get_attribute_name (attr));
3306 inform (attrs_loc, "expected %i, found %i", 1, nargs);
3308 else
3310 tree arg = TREE_VALUE (args);
3311 if (!type_dependent_expression_p (arg))
3312 arg = contextual_conv_bool (arg, tf_warning_or_error);
3313 if (error_operand_p (arg))
3314 continue;
3315 finish_expr_stmt (build_assume_call (attrs_loc, arg));
3318 return remove_attribute ("gnu", "assume", std_attrs);
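/* E.g. (sketch):

     [[assume (x > 0)]];

   becomes a call to the internal function IFN_ASSUME with the
   contextually-converted-to-bool condition, wrapped in a cleanup point
   outside of templates.  */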
3321 /* Return the type std::source_location::__impl after performing
3322 verification on it. */
3324 tree
3325 get_source_location_impl_type ()
3327 tree name = get_identifier ("source_location");
3328 tree decl = lookup_qualified_name (std_node, name);
3329 if (TREE_CODE (decl) != TYPE_DECL)
3331 auto_diagnostic_group d;
3332 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3333 qualified_name_lookup_error (std_node, name, decl, input_location);
3334 else
3335 error ("%qD is not a type", decl);
3336 return error_mark_node;
3338 name = get_identifier ("__impl");
3339 tree type = TREE_TYPE (decl);
3340 decl = lookup_qualified_name (type, name);
3341 if (TREE_CODE (decl) != TYPE_DECL)
3343 auto_diagnostic_group d;
3344 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3345 qualified_name_lookup_error (type, name, decl, input_location);
3346 else
3347 error ("%qD is not a type", decl);
3348 return error_mark_node;
3350 type = TREE_TYPE (decl);
3351 if (TREE_CODE (type) != RECORD_TYPE)
3353 error ("%qD is not a class type", decl);
3354 return error_mark_node;
3357 int cnt = 0;
3358 for (tree field = TYPE_FIELDS (type);
3359 (field = next_aggregate_field (field)) != NULL_TREE;
3360 field = DECL_CHAIN (field))
3362 if (DECL_NAME (field) != NULL_TREE)
3364 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3365 if (strcmp (n, "_M_file_name") == 0
3366 || strcmp (n, "_M_function_name") == 0)
3368 if (TREE_TYPE (field) != const_string_type_node)
3370 error ("%qD does not have %<const char *%> type", field);
3371 return error_mark_node;
3373 cnt++;
3374 continue;
3376 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3378 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3380 error ("%qD does not have integral type", field);
3381 return error_mark_node;
3383 cnt++;
3384 continue;
3387 cnt = 0;
3388 break;
3390 if (cnt != 4)
3392 error ("%<std::source_location::__impl%> does not contain only "
3393 "non-static data members %<_M_file_name%>, "
3394 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3395 return error_mark_node;
3397 return build_qualified_type (type, TYPE_QUAL_CONST);
3400 /* Type for source_location_table hash_set. */
3401 struct GTY((for_user)) source_location_table_entry {
3402 location_t loc;
3403 unsigned uid;
3404 tree var;
3407 /* Traits class for function start hash maps below. */
3409 struct source_location_table_entry_hash
3410 : ggc_remove <source_location_table_entry>
3412 typedef source_location_table_entry value_type;
3413 typedef source_location_table_entry compare_type;
3415 static hashval_t
3416 hash (const source_location_table_entry &ref)
3418 inchash::hash hstate (0);
3419 hstate.add_int (ref.loc);
3420 hstate.add_int (ref.uid);
3421 return hstate.end ();
3424 static bool
3425 equal (const source_location_table_entry &ref1,
3426 const source_location_table_entry &ref2)
3428 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3431 static void
3432 mark_deleted (source_location_table_entry &ref)
3434 ref.loc = UNKNOWN_LOCATION;
3435 ref.uid = -1U;
3436 ref.var = NULL_TREE;
3439 static const bool empty_zero_p = true;
3441 static void
3442 mark_empty (source_location_table_entry &ref)
3444 ref.loc = UNKNOWN_LOCATION;
3445 ref.uid = 0;
3446 ref.var = NULL_TREE;
3449 static bool
3450 is_deleted (const source_location_table_entry &ref)
3452 return (ref.loc == UNKNOWN_LOCATION
3453 && ref.uid == -1U
3454 && ref.var == NULL_TREE);
3457 static bool
3458 is_empty (const source_location_table_entry &ref)
3460 return (ref.loc == UNKNOWN_LOCATION
3461 && ref.uid == 0
3462 && ref.var == NULL_TREE);
3465 static void
3466 pch_nx (source_location_table_entry &p)
3468 extern void gt_pch_nx (source_location_table_entry &);
3469 gt_pch_nx (p);
3472 static void
3473 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3475 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3476 void *);
3477 gt_pch_nx (&p, op, cookie);
3481 static GTY(()) hash_table <source_location_table_entry_hash>
3482 *source_location_table;
3483 static GTY(()) unsigned int source_location_id;
3485 /* Fold the __builtin_source_location () call T. */
3487 tree
3488 fold_builtin_source_location (const_tree t)
3490 gcc_assert (TREE_CODE (t) == CALL_EXPR);
3491 /* TREE_TYPE (t) is const std::source_location::__impl* */
3492 tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
3493 if (source_location_impl == error_mark_node)
3494 return build_zero_cst (const_ptr_type_node);
3495 gcc_assert (CLASS_TYPE_P (source_location_impl)
3496 && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));
3498 location_t loc = EXPR_LOCATION (t);
3499 if (source_location_table == NULL)
3500 source_location_table
3501 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3502 const line_map_ordinary *map;
3503 source_location_table_entry entry;
3504 entry.loc
3505 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3506 &map);
3507 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3508 entry.var = error_mark_node;
3509 source_location_table_entry *entryp
3510 = source_location_table->find_slot (entry, INSERT);
3511 tree var;
3512 if (entryp->var)
3513 var = entryp->var;
3514 else
3516 char tmp_name[32];
3517 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3518 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3519 source_location_impl);
3520 TREE_STATIC (var) = 1;
3521 TREE_PUBLIC (var) = 0;
3522 DECL_ARTIFICIAL (var) = 1;
3523 DECL_IGNORED_P (var) = 1;
3524 DECL_EXTERNAL (var) = 0;
3525 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3526 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3527 layout_decl (var, 0);
3529 vec<constructor_elt, va_gc> *v = NULL;
3530 vec_alloc (v, 4);
3531 for (tree field = TYPE_FIELDS (source_location_impl);
3532 (field = next_aggregate_field (field)) != NULL_TREE;
3533 field = DECL_CHAIN (field))
3535 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3536 tree val = NULL_TREE;
3537 if (strcmp (n, "_M_file_name") == 0)
3539 if (const char *fname = LOCATION_FILE (loc))
3541 fname = remap_macro_filename (fname);
3542 val = build_string_literal (fname);
3544 else
3545 val = build_string_literal ("");
3547 else if (strcmp (n, "_M_function_name") == 0)
3549 const char *name = "";
3551 if (current_function_decl)
3552 name = cxx_printable_name (current_function_decl, 2);
3554 val = build_string_literal (name);
3556 else if (strcmp (n, "_M_line") == 0)
3557 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3558 else if (strcmp (n, "_M_column") == 0)
3559 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3560 else
3561 gcc_unreachable ();
3562 CONSTRUCTOR_APPEND_ELT (v, field, val);
3565 tree ctor = build_constructor (source_location_impl, v);
3566 TREE_CONSTANT (ctor) = 1;
3567 TREE_STATIC (ctor) = 1;
3568 DECL_INITIAL (var) = ctor;
3569 varpool_node::finalize_decl (var);
3570 *entryp = entry;
3571 entryp->var = var;
3574 return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
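/* Illustrative effect: each distinct location/function pair using
   __builtin_source_location () materializes one static
   std::source_location::__impl object (an Lsrc_loc* internal label)
   initialized with {file, function, line, column}, and the call folds
   to that object's address.  */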
3577 #include "gt-cp-cp-gimplify.h"