/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "asan.h"
37 #include "gcc-rich-location.h"
38 #include "memmodel.h"
39 #include "tm_p.h"
40 #include "output.h"
41 #include "file-prefix-map.h"
42 #include "cgraph.h"
43 #include "omp-general.h"
44 #include "opts.h"
/* Keep track of forward references to immediate-escalating functions in
   case they become consteval.  This set contains ADDR_EXPRs and
   PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
   function call in them, to check that they can be evaluated to a constant,
   and immediate-escalating functions that may become consteval.  */

static GTY(()) hash_set<tree> *deferred_escalating_exprs;
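
/* Editorial illustration (not part of the original source): the P2564-style
   user-code pattern this set exists to track.  A call to a consteval
   function inside a constexpr template can escalate the enclosing function
   to consteval once it is instantiated, e.g.

     consteval int id (int i) { return i; }
     template <typename T>
     constexpr T f (T t) { return id (t); }  // f<int> may become consteval

   Until f<int> has been escalation-checked, expressions such as &f<int>
   are remembered here so they can be re-examined afterwards.  */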
static void
remember_escalating_expr (tree t)
{
  if (!deferred_escalating_exprs)
    deferred_escalating_exprs = hash_set<tree>::create_ggc (37);
  deferred_escalating_exprs->add (t);
}
/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
};

using fold_flags_t = int;

struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);
static tree cp_fold_immediate_r (tree *, int *, void *);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as the then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
        stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
                       void_node, else_);
      else
        stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
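
/* Editorial note (illustration only, not compiler source): for a consteval
   if statement such as

     if consteval { A } else { B }

   IF_STMT_CONSTEVAL_P is set and IF_COND is boolean_false_node, so the
   lowering above keeps only B, wrapped as COND_EXPR (false, void_node, B)
   when A may fall through.  The A branch may still contain unfolded
   immediate function calls and must not survive into GENERIC.  */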
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !warning_suppressed_p (stmt, OPT_Wunused_value))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
        /* Make sure that we expected to elide this temporary.  But also allow
           gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
        gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
                             || !TREE_ADDRESSABLE (TREE_TYPE (from)));
        if (target_expr_needs_replace (from))
          {
            /* If this was changed by cp_genericize_target_expr, we need to
               walk into it to replace uses of the slot.  */
            replace_decl (&init, TARGET_EXPR_SLOT (from), to);
            *expr_p = init;
            return;
          }
        else
          from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
          || lambda_static_thunk_p (fn))
        /* In a thunk, we pass through invisible reference parms, so this isn't
           actually a copy.  */
        return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
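
/* Editorial illustration (not compiler source): the kind of copy this
   predicate recognizes.  Given

     struct empty {};
     empty a, b;
     a = b;   // nothing to actually copy

   the assignment is an empty-class copy with a simple RHS, so the
   MODIFY_EXPR handling in cp_gimplify_expr below can drop the load and
   store entirely.  */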
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
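
/* Editorial illustration (assumption, mirroring the comment above): for

     volatile int v;
     int a[10];
     v;          // merely naming v: no side-effects as an lvalue
     a[v] = 0;   // the ARRAY_REF index reads v: the lvalue has side-effects

   only the second lvalue is side-effecting, which is what the P0145
   preevaluation logic in cp_gimplify_expr cares about.  */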
/* Return true if FN is an immediate-escalating function.  */

static bool
immediate_escalating_function_p (tree fn)
{
  if (!fn || !flag_immediate_escalation)
    return false;

  gcc_checking_assert (TREE_CODE (fn) == FUNCTION_DECL);

  if (DECL_IMMEDIATE_FUNCTION_P (fn))
    return false;

  /* An immediate-escalating function is
     -- the call operator of a lambda that is not declared with the consteval
        specifier  */
  if (LAMBDA_FUNCTION_P (fn))
    return true;
  /* -- a defaulted special member function that is not declared with the
        consteval specifier  */
  special_function_kind sfk = special_memfn_p (fn);
  if (sfk != sfk_none && DECL_DEFAULTED_FN (fn))
    return true;
  /* -- a function that results from the instantiation of a templated entity
        defined with the constexpr specifier.  */
  return is_instantiation_of_constexpr (fn);
}

/* Return true if FN is an immediate-escalating function that has not been
   checked for escalating expressions.  */

static bool
unchecked_immediate_escalating_function_p (tree fn)
{
  return (immediate_escalating_function_p (fn)
          && !DECL_ESCALATION_CHECKED_P (fn));
}
/* Promote FN to an immediate function, including its clones.  */

static void
promote_function_to_consteval (tree fn)
{
  SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ESCALATION_CHECKED_P (fn) = true;
  tree clone;
  FOR_EACH_CLONE (clone, fn)
    {
      SET_DECL_IMMEDIATE_FUNCTION_P (clone);
      DECL_ESCALATION_CHECKED_P (clone) = true;
    }
}
/* A wrapper around cp_fold_immediate_r.  Return a non-null tree if
   we found a non-constant immediate function, or taking the address
   of an immediate function.  */

tree
cp_fold_immediate (tree *tp, mce_value manifestly_const_eval,
                   tree decl /*= current_function_decl*/)
{
  if (cxx_dialect <= cxx17)
    return NULL_TREE;

  temp_override<tree> cfd (current_function_decl, decl);

  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;

  cp_fold_data data (flags);
  int save_errorcount = errorcount;
  tree r = cp_walk_tree_without_duplicates (tp, cp_fold_immediate_r, &data);
  if (errorcount > save_errorcount)
    return integer_one_node;
  return r;
}
/* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
   was initially not an immediate function, but was promoted to one because
   its body contained an immediate-escalating expression or conversion.  */

static void
maybe_explain_promoted_consteval (location_t loc, tree fn)
{
  if (DECL_ESCALATION_CHECKED_P (fn))
    {
      /* See if we can figure out what made the function consteval.  */
      tree x = cp_fold_immediate (&DECL_SAVED_TREE (fn), mce_unknown, NULL_TREE);
      if (x)
        inform (cp_expr_loc_or_loc (x, loc),
                "%qD was promoted to an immediate function because its "
                "body contains an immediate-escalating expression %qE", fn, x);
      else
        inform (loc, "%qD was promoted to an immediate function", fn);
    }
}
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
                 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
         that can mean we don't copy the argument and some following
         argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
        return GS_ERROR;
      else if (ordered
               && is_gimple_reg_type (TREE_TYPE (*arg_p))
               && is_gimple_variable (*arg_p)
               && TREE_CODE (*arg_p) != SSA_NAME
               /* No need to force references into register, references
                  can't be modified.  */
               && !TYPE_REF_P (TREE_TYPE (*arg_p))
               /* And this can't be modified either.  */
               && *arg_p != current_class_ptr)
        *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
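
/* Editorial illustration (assumption, not compiler source): why ORDERED
   matters.  For a call whose arguments must be evaluated left to right
   (CALL_EXPR_ORDERED_ARGS), e.g.

     int i = 1;
     f (i, ++i);

   the value of the first argument must not be affected by the later ++i,
   so a bare variable of gimple-reg type is forced into a fresh temporary
   or SSA_NAME before any following side-effecting argument runs.  */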
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
                                        tf_warning_or_error);

        cp_fold_data data (ff_genericize | ff_mce_false);
        cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
        cp_genericize_tree (expr_p, false);
        copy_if_shared (expr_p);
        ret = GS_OK;
      }
      break;
    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            while (TREE_CODE (op1) == TARGET_EXPR)
              /* We're disconnecting the initializer from its target,
                 don't create a temporary.  */
              op1 = TARGET_EXPR_INITIAL (op1);

            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
            if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
              /* Avoid 'return *<retval>;'  */
              *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OMP_TILE:
    case OMP_UNROLL:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;
    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;
    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
                               TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          int last_side_effects_arg = -1;
          for (int i = nargs; i > 0; --i)
            if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
              {
                last_side_effects_arg = i;
                break;
              }
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
                                   i < last_side_effects_arg);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              int nargs = call_expr_nargs (*expr_p);
              bool side_effects = false;
              for (int i = 1; i < nargs; ++i)
                if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
                  {
                    side_effects = true;
                    break;
                  }
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
                                   side_effects);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (!decl)
            break;
          if (fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
            switch (DECL_FE_FUNCTION_CODE (decl))
              {
              case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
                *expr_p = boolean_false_node;
                break;
              case CP_BUILT_IN_SOURCE_LOCATION:
                *expr_p
                  = fold_builtin_source_location (*expr_p);
                break;
              case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
                *expr_p
                  = fold_builtin_is_corresponding_member
                    (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                     &CALL_EXPR_ARG (*expr_p, 0));
                break;
              case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
                *expr_p
                  = fold_builtin_is_pointer_inverconvertible_with_class
                    (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                     &CALL_EXPR_ARG (*expr_p, 0));
                break;
              default:
                break;
              }
          else if (fndecl_built_in_p (decl, BUILT_IN_CLZG, BUILT_IN_CTZG))
            ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p,
                                                          post_p);
          else
            /* All consteval functions should have been processed by now.  */
            gcc_checking_assert (!immediate_invocation_p (decl));
        }
      break;
    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
         elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
         gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
         on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
                           || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
        ret = GS_ERROR;
      else
        ret = GS_OK;
      break;
    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
/* True if any of the element initializers in CTOR are TARGET_EXPRs that are
   not expected to elide, e.g. because unsafe_copy_elision_p is true.  */

static bool
any_non_eliding_target_exprs (tree ctor)
{
  for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
    {
      if (TREE_CODE (e.value) == TARGET_EXPR
          && !TARGET_EXPR_ELIDING_P (e.value))
        return true;
    }
  return false;
}
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to, vec<tree,va_gc>** flags)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error, flags);
  else if (TREE_CODE (from) == CONSTRUCTOR
           && TREE_SIDE_EFFECTS (from)
           && ((flag_exceptions
                && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
               || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
        /* Make cp_gimplify_init_expr call replace_decl on this
           TARGET_EXPR_INITIAL.  */
        init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
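
/* Editorial illustration (not compiler source): the cleanup problem
   split_nonconstant_init addresses.  For

     struct A { A (); ~A (); };
     struct B { A a1, a2; };
     B b = { A (), A () };

   if constructing a2 throws, a1 must still be destroyed, so the single
   CONSTRUCTOR is broken into per-member initializations wrapped in the
   appropriate EH cleanups.  */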
/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to, nullptr);
}
/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  vec<tree, va_gc> *flags = make_tree_vector ();
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
                      TARGET_EXPR_INITIAL (*stmt_p), slot, &flags);
  gcc_assert (!DECL_INITIAL (slot));
  for (tree f : flags)
    {
      /* Once initialization is complete TARGET_EXPR_CLEANUP becomes active, so
         disable any subobject cleanups.  */
      tree d = build_disable_temp_cleanup (f);
      auto &r = TARGET_EXPR_INITIAL (*stmt_p);
      r = add_stmt_to_compound (r, d);
    }
  release_tree_vector (flags);
}
/* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
   TARGET_EXPR_INITIAL, and this also updates *_SLOT.  We need this extra
   replacement when cp_folding TARGET_EXPR to preserve the invariant that
   AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT.  */

static bool
maybe_replace_decl (tree *tp, tree decl, tree replacement)
{
  if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
    return false;
  tree t = *tp;
  while (TREE_CODE (t) == COMPOUND_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == AGGR_INIT_EXPR)
    replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
  else if (TREE_CODE (t) == VEC_INIT_EXPR)
    replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
  else
    replace_decl (tp, decl, replacement);
  return true;
}
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
/* Emit an error about taking the address of an immediate function.
   EXPR is the whole expression; DECL is the immediate function.  */

static void
taking_address_of_imm_fn_error (tree expr, tree decl)
{
  auto_diagnostic_group d;
  const location_t loc = (TREE_CODE (expr) == PTRMEM_CST
                          ? PTRMEM_CST_LOCATION (expr)
                          : EXPR_LOCATION (expr));
  error_at (loc, "taking address of an immediate function %qD", decl);
  maybe_explain_promoted_consteval (loc, decl);
}
/* A subroutine of cp_fold_r to handle immediate functions.  */

static tree
cp_fold_immediate_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  auto data = static_cast<cp_fold_data *>(data_);
  tree stmt = *stmt_p;
  /* The purpose of this is not to emit errors for mce_unknown.  */
  const tsubst_flags_t complain = (data->flags & ff_mce_false
                                   ? tf_error : tf_none);
  const tree_code code = TREE_CODE (stmt);

  /* No need to look into types or unevaluated operands.
     NB: This affects cp_fold_r as well.  */
  if (TYPE_P (stmt)
      || unevaluated_p (code)
      /* We do not use in_immediate_context here because it checks
         more than is desirable, e.g., sk_template_parms.  */
      || cp_unevaluated_operand
      || (current_function_decl
          && DECL_IMMEDIATE_FUNCTION_P (current_function_decl)))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  tree decl = NULL_TREE;
  bool call_p = false;

  /* We are looking for &fn or fn().  */
  switch (code)
    {
    case CALL_EXPR:
    case AGGR_INIT_EXPR:
      if (tree fn = cp_get_callee (stmt))
        if (TREE_CODE (fn) != ADDR_EXPR || ADDR_EXPR_DENOTES_CALL_P (fn))
          decl = cp_get_fndecl_from_callee (fn, /*fold*/false);
      call_p = true;
      break;
    case PTRMEM_CST:
      decl = PTRMEM_CST_MEMBER (stmt);
      break;
    case ADDR_EXPR:
      if (!ADDR_EXPR_DENOTES_CALL_P (stmt))
        decl = TREE_OPERAND (stmt, 0);
      break;
    default:
      return NULL_TREE;
    }

  if (!decl || TREE_CODE (decl) != FUNCTION_DECL)
    return NULL_TREE;
  /* Fully escalate once all templates have been instantiated.  What we're
     calling is not a consteval function but it may become one.  This
     requires recursing; DECL may be promoted to consteval because it
     contains an escalating expression E, but E itself may have to be
     promoted first, etc.  */
  if (at_eof > 1 && unchecked_immediate_escalating_function_p (decl))
    {
      /* Set before the actual walk to avoid endless recursion.  */
      DECL_ESCALATION_CHECKED_P (decl) = true;
      /* We're only looking for the first escalating expression.  Let us not
         walk more trees than necessary, hence mce_unknown.  */
      cp_fold_immediate (&DECL_SAVED_TREE (decl), mce_unknown, decl);
    }

  /* [expr.const]p16 "An expression or conversion is immediate-escalating if
     it is not initially in an immediate function context and it is either
     -- an immediate invocation that is not a constant expression and is not
     a subexpression of an immediate invocation."

     If we are in an immediate-escalating function, the immediate-escalating
     expression or conversion makes it an immediate function.  So STMT does
     not need to produce a constant expression.  */
  if (DECL_IMMEDIATE_FUNCTION_P (decl))
    {
      tree e = cxx_constant_value (stmt, tf_none);
      if (e == error_mark_node)
        {
          /* This takes care of, e.g.,
               template <typename T>
               constexpr int f(T t)
               {
                 return id(t);
               }
             where id (consteval) causes f<int> to be promoted.  */
          if (immediate_escalating_function_p (current_function_decl))
            promote_function_to_consteval (current_function_decl);
          else if (complain & tf_error)
            {
              if (call_p)
                {
                  auto_diagnostic_group d;
                  location_t loc = cp_expr_loc_or_input_loc (stmt);
                  error_at (loc, "call to consteval function %qE is "
                            "not a constant expression", stmt);
                  /* Explain why it's not a constant expression.  */
                  *stmt_p = cxx_constant_value (stmt, complain);
                  maybe_explain_promoted_consteval (loc, decl);
                }
              else if (!data->pset.add (stmt))
                {
                  taking_address_of_imm_fn_error (stmt, decl);
                  *stmt_p = build_zero_cst (TREE_TYPE (stmt));
                }
              /* If we're giving hard errors, continue the walk rather than
                 bailing out after the first error.  */
              return NULL_TREE;
            }
          *walk_subtrees = 0;
          return stmt;
        }
      /* We've evaluated the consteval function call.  */
      if (call_p)
        *stmt_p = e;
    }
  /* We've encountered a function call that may turn out to be consteval
     later.  Store its caller so that we can ensure that the call is
     a constant expression.  */
  else if (unchecked_immediate_escalating_function_p (decl))
    {
      /* Make sure we're not inserting new elements while walking
         the deferred_escalating_exprs hash table; if we are, it's
         likely that a function wasn't properly marked checked for
         i-e expressions.  */
      gcc_checking_assert (at_eof <= 1);
      if (current_function_decl)
        remember_escalating_expr (current_function_decl);
      /* auto p = &f<int>; in the global scope won't be ensconced in
         a function we could store for later at this point.  (If there's
         no c_f_d at this point and we're dealing with a call, we should
         see the call when cp_fold_function __static_i_and_d.)  */
      else if (!call_p)
        remember_escalating_expr (stmt);
    }

  return NULL_TREE;
}
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.

   ??? This is algorithmically weird because walk_tree works in pre-order, so
   we see outer expressions before inner expressions.  This isn't as much of an
   issue because cp_fold recurses into subexpressions in many cases, but then
   walk_tree walks back into those subexpressions again.  We avoid the
   resulting complexity problem by caching the result of cp_fold, but it's
   inelegant.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  if (cxx_dialect >= cxx20)
    {
      /* Unfortunately we must handle code like
           false ? bar () : 42
         where we have to check bar too.  The cp_fold call below could
         fold the ?: into a constant before we've checked it.  */
      if (code == COND_EXPR)
        {
          auto then_fn = cp_fold_r, else_fn = cp_fold_r;
          /* See if we can figure out if either of the branches is dead.  If it
             is, we don't need to do everything that cp_fold_r does.  */
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_fold_r, data, nullptr);
          if (integer_zerop (TREE_OPERAND (stmt, 0)))
            then_fn = cp_fold_immediate_r;
          else if (integer_nonzerop (TREE_OPERAND (stmt, 0)))
            else_fn = cp_fold_immediate_r;

          if (TREE_OPERAND (stmt, 1))
            cp_walk_tree (&TREE_OPERAND (stmt, 1), then_fn, data,
                          nullptr);
          if (TREE_OPERAND (stmt, 2))
            cp_walk_tree (&TREE_OPERAND (stmt, 2), else_fn, data,
                          nullptr);
          *walk_subtrees = 0;
          /* Don't return yet, still need the cp_fold below.  */
        }
      else
        cp_fold_immediate_r (stmt_p, walk_subtrees, data);
    }

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  /* For certain trees, like +foo(), the cp_fold above will remove the +,
     and the subsequent tree walk would go straight down to the CALL_EXPR's
     operands, meaning that cp_fold_immediate_r would never see the
     CALL_EXPR.  Ew :(.  */
  if (TREE_CODE (stmt) == CALL_EXPR && code != CALL_EXPR)
    cp_fold_immediate_r (stmt_p, walk_subtrees, data);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OMP_TILE:
    case OMP_UNROLL:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
        {
          /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
             boolean_false_node.  */
          cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
          cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          return NULL_TREE;
        }
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
         here rather than in cp_genericize to avoid problems with the invisible
         reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
        cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
        cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
         that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
        {
          cp_walk_tree (&init, cp_fold_r, data, NULL);
          cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          if (TREE_CODE (init) == TARGET_EXPR)
            {
              tree sub = TARGET_EXPR_INITIAL (init);
              maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
                                  TARGET_EXPR_SLOT (stmt));
              init = sub;
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);

  /* This is merely an optimization: if FNDECL has no i-e expressions,
     we'll not save c_f_d, and we can safely say that FNDECL will not
     be promoted to consteval.  */
  if (deferred_escalating_exprs
      && !deferred_escalating_exprs->contains (current_function_decl))
    DECL_ESCALATION_CHECKED_P (fndecl) = true;
}
/* We've stashed immediate-escalating functions.  Now see if they indeed
   ought to be promoted to consteval.  */

void
process_and_check_pending_immediate_escalating_fns ()
{
  /* This will be null for -fno-immediate-escalation.  */
  if (!deferred_escalating_exprs)
    return;

  for (auto e : *deferred_escalating_exprs)
    if (TREE_CODE (e) == FUNCTION_DECL && !DECL_ESCALATION_CHECKED_P (e))
      cp_fold_immediate (&DECL_SAVED_TREE (e), mce_false, e);

  /* We've escalated every function that could have been promoted to
     consteval.  Check that we are not taking the address of a consteval
     function.  */
  for (auto e : *deferred_escalating_exprs)
    {
      if (TREE_CODE (e) == FUNCTION_DECL)
        continue;
      tree decl = (TREE_CODE (e) == PTRMEM_CST
                   ? PTRMEM_CST_MEMBER (e)
                   : TREE_OPERAND (e, 0));
      if (DECL_IMMEDIATE_FUNCTION_P (decl))
        taking_address_of_imm_fn_error (e, decl);
    }

  deferred_escalating_exprs = nullptr;
}
/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
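
/* Editorial note (illustration only, an assumption about the overload
   called above): for scalar operands a SPACESHIP_EXPR such as

     auto c = x <=> y;   // x, y of type int

   lowers to GENERIC that selects among the constants representing
   std::strong_ordering::less/equal/greater based on ordinary comparisons
   of x and y; the overload taking an explicit location, type and operands
   does that work.  */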
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
        return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
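
/* Editorial illustration (assumption, cf. c++/88256): the kind of code that
   needs the DECL_EXPR above.

     void f (int n, void *p)
     {
       auto q = (int (*)[n]) p;   // cast to pointer-to-VLA
     }

   Without a DECL_EXPR for the anonymous type int[n], gimplify_type_sizes
   never sees it, and the middle-end has no place to evaluate 'n' for the
   type's size.  */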
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
        if (alias != error_mark_node)
          {
            *stmt_p = alias;
            TREE_USED (alias) |= TREE_USED (stmt);
          }
        *walk_subtrees = 0;
        return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (UNLIKELY (wtd->omp_ctx != NULL)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0))
        {
          if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
            /* Don't dereference an invisiref RESULT_DECL inside a
               RETURN_EXPR.  */
            *walk_subtrees = 0;
          if (RETURN_EXPR_LOCAL_ADDR_P (stmt))
            {
              /* Don't return the address of a local variable.  */
              tree *p = &TREE_OPERAND (stmt, 0);
              while (TREE_CODE (*p) == COMPOUND_EXPR)
                p = &TREE_OPERAND (*p, 0);
              if (TREE_CODE (*p) == INIT_EXPR)
                {
                  tree op = TREE_OPERAND (*p, 1);
                  tree new_op = build2 (COMPOUND_EXPR, TREE_TYPE (op), op,
                                        build_zero_cst (TREE_TYPE (op)));
                  TREE_OPERAND (*p, 1) = new_op;
                }
            }
        }
      break;
    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;
      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;
    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;
    case ASSERTION_STMT:
    case PRECONDITION_STMT:
    case POSTCONDITION_STMT:
      {
        if (tree check = build_contract_check (stmt))
          {
            *stmt_p = check;
            return cp_genericize_r (stmt_p, walk_subtrees, data);
          }

        /* If we didn't build a check, replace it with void_node so we don't
           leak contracts into GENERIC.  */
        *stmt_p = void_node;
        *walk_subtrees = 0;
      }
      break;
1872 case USING_STMT:
1874 tree block = NULL_TREE;
1876 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1877 BLOCK, and append an IMPORTED_DECL to its
1878 BLOCK_VARS chained list. */
1879 if (wtd->bind_expr_stack.exists ())
1881 int i;
1882 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1883 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1884 break;
1886 if (block)
1888 tree decl = TREE_OPERAND (stmt, 0);
1889 gcc_assert (decl);
1891 if (undeduced_auto_decl (decl))
1892 /* Omit from the GENERIC, the back-end can't handle it. */;
1893 else
1895 tree using_directive = make_node (IMPORTED_DECL);
1896 TREE_TYPE (using_directive) = void_type_node;
1897 DECL_CONTEXT (using_directive) = current_function_decl;
1898 DECL_SOURCE_LOCATION (using_directive)
1899 = cp_expr_loc_or_input_loc (stmt);
1901 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1902 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1903 BLOCK_VARS (block) = using_directive;
1906 /* The USING_STMT won't appear in GENERIC. */
1907 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1908 *walk_subtrees = 0;
1910 break;
1912 case DECL_EXPR:
1913 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1915 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1916 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1917 *walk_subtrees = 0;
1919 else
1921 tree d = DECL_EXPR_DECL (stmt);
1922 if (VAR_P (d))
1923 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1925 break;
1927 case OMP_PARALLEL:
1928 case OMP_TASK:
1929 case OMP_TASKLOOP:
1931 struct cp_genericize_omp_taskreg omp_ctx;
1932 tree c, decl;
1933 splay_tree_node n;
1935 *walk_subtrees = 0;
1936 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1937 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1938 omp_ctx.default_shared = omp_ctx.is_parallel;
1939 omp_ctx.outer = wtd->omp_ctx;
1940 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1941 wtd->omp_ctx = &omp_ctx;
1942 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1943 switch (OMP_CLAUSE_CODE (c))
1945 case OMP_CLAUSE_SHARED:
1946 case OMP_CLAUSE_PRIVATE:
1947 case OMP_CLAUSE_FIRSTPRIVATE:
1948 case OMP_CLAUSE_LASTPRIVATE:
1949 decl = OMP_CLAUSE_DECL (c);
1950 if (decl == error_mark_node || !omp_var_to_track (decl))
1951 break;
1952 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1953 if (n != NULL)
1954 break;
1955 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1956 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1957 ? OMP_CLAUSE_DEFAULT_SHARED
1958 : OMP_CLAUSE_DEFAULT_PRIVATE);
1959 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1960 omp_cxx_notice_variable (omp_ctx.outer, decl);
1961 break;
1962 case OMP_CLAUSE_DEFAULT:
1963 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1964 omp_ctx.default_shared = true;
1965 default:
1966 break;
1968 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1969 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1970 cp_genericize_r, cp_walk_subtrees);
1971 else
1972 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1973 wtd->omp_ctx = omp_ctx.outer;
1974 splay_tree_delete (omp_ctx.variables);
1976 break;
1978 case OMP_TARGET:
1979 cfun->has_omp_target = true;
1980 break;
1982 case TRY_BLOCK:
1984 *walk_subtrees = 0;
1985 tree try_block = wtd->try_block;
1986 wtd->try_block = stmt;
1987 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1988 wtd->try_block = try_block;
1989 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1991 break;
1993 case MUST_NOT_THROW_EXPR:
1994 /* MUST_NOT_THROW_COND might be something else with TM. */
1995 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1997 *walk_subtrees = 0;
1998 tree try_block = wtd->try_block;
1999 wtd->try_block = stmt;
2000 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
2001 wtd->try_block = try_block;
2003 break;
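/* A sketch of what the THROW_EXPR handling below diagnoses
   (illustrative, not from this file):

     struct S { ~S () { throw 1; } };

   In C++11 the destructor is implicitly noexcept, so the throw
   always calls std::terminate and -Wterminate warns.  */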
2005 case THROW_EXPR:
2007 location_t loc = location_of (stmt);
2008 if (warning_suppressed_p (stmt /* What warning? */))
2009 /* Never mind. */;
2010 else if (wtd->try_block)
2012 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
2014 auto_diagnostic_group d;
2015 if (warning_at (loc, OPT_Wterminate,
2016 "%<throw%> will always call %<terminate%>")
2017 && cxx_dialect >= cxx11
2018 && DECL_DESTRUCTOR_P (current_function_decl))
2019 inform (loc, "in C++11 destructors default to %<noexcept%>");
2022 else
2024 if (warn_cxx11_compat && cxx_dialect < cxx11
2025 && DECL_DESTRUCTOR_P (current_function_decl)
2026 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
2027 == NULL_TREE)
2028 && (get_defaulted_eh_spec (current_function_decl)
2029 == empty_except_spec))
2030 warning_at (loc, OPT_Wc__11_compat,
2031 "in C++11 this %<throw%> will call %<terminate%> "
2032 "because destructors default to %<noexcept%>");
2035 break;
2037 case CONVERT_EXPR:
2038 gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
2039 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
2040 break;
2042 case SPACESHIP_EXPR:
2043 *stmt_p = genericize_spaceship (*stmt_p);
2044 break;
2046 case PTRMEM_CST:
2047 /* By the time we get here we're handing off to the back end, so we don't
2048 need or want to preserve PTRMEM_CST anymore. */
2049 *stmt_p = cplus_expand_constant (stmt);
2050 *walk_subtrees = 0;
2051 break;
2053 case MEM_REF:
2054 /* For MEM_REF, make sure not to sanitize the second operand even
2055 if it has reference type. It is just an offset with a type
2056 holding other information. There is no other processing we
2057 need to do for INTEGER_CSTs, so just ignore the second argument
2058 unconditionally. */
2059 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
2060 *walk_subtrees = 0;
2061 break;
2063 case NOP_EXPR:
2064 *stmt_p = predeclare_vla (*stmt_p);
2066 /* Warn of new allocations that are not big enough for the target
2067 type. */
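/* E.g. (illustrative), with -Walloc-size enabled:

     struct S { int a[4]; };
     S *p = (S *) malloc (4);

   requests fewer bytes than sizeof (S) and is diagnosed via the
   alloc_size attribute on malloc.  */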
2068 if (warn_alloc_size
2069 && TREE_CODE (TREE_OPERAND (stmt, 0)) == CALL_EXPR
2070 && POINTER_TYPE_P (TREE_TYPE (stmt)))
2072 if (tree fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 0)))
2073 if (DECL_IS_MALLOC (fndecl))
2075 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
2076 tree alloc_size = lookup_attribute ("alloc_size", attrs);
2077 if (alloc_size)
2078 warn_for_alloc_size (EXPR_LOCATION (stmt),
2079 TREE_TYPE (TREE_TYPE (stmt)),
2080 TREE_OPERAND (stmt, 0), alloc_size);
2084 if (!wtd->no_sanitize_p
2085 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
2086 && TYPE_REF_P (TREE_TYPE (stmt)))
2087 ubsan_maybe_instrument_reference (stmt_p);
2088 break;
2090 case CALL_EXPR:
2091 /* Evaluate function concept checks instead of treating them as
2092 normal functions. */
2093 if (concept_check_p (stmt))
2095 *stmt_p = evaluate_concept_check (stmt);
2096 *walk_subtrees = 0;
2097 break;
2100 if (!wtd->no_sanitize_p
2101 && sanitize_flags_p ((SANITIZE_NULL
2102 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
2104 tree fn = CALL_EXPR_FN (stmt);
2105 if (fn != NULL_TREE
2106 && !error_operand_p (fn)
2107 && INDIRECT_TYPE_P (TREE_TYPE (fn))
2108 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
2110 bool is_ctor
2111 = TREE_CODE (fn) == ADDR_EXPR
2112 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2113 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
2114 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
2115 ubsan_maybe_instrument_member_call (stmt, is_ctor);
2116 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
2117 cp_ubsan_maybe_instrument_member_call (stmt);
2119 else if (fn == NULL_TREE
2120 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
2121 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
2122 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
2123 *walk_subtrees = 0;
2125 /* Fall through. */
2126 case AGGR_INIT_EXPR:
2127 /* For calls to a multi-versioned function, overload resolution
2128 returns the function with the highest target priority, that is,
2129 the version that will be checked for dispatching first. If this
2130 version is inlinable, a direct call to this version can be made;
2131 otherwise the call should go through the dispatcher. */
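/* An illustrative example of function multi-versioning (not from
   this file):

     __attribute__ ((target ("default"))) int f () { return 0; }
     __attribute__ ((target ("avx2"))) int f () { return 1; }

   A call to f resolves to the highest-priority version; unless that
   version can be inlined into the caller, the code below redirects
   the call to the dispatcher.  */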
2133 tree fn = cp_get_callee_fndecl_nofold (stmt);
2134 if (fn && DECL_FUNCTION_VERSIONED (fn)
2135 && (current_function_decl == NULL
2136 || !targetm.target_option.can_inline_p (current_function_decl,
2137 fn)))
2138 if (tree dis = get_function_version_dispatcher (fn))
2140 mark_versions_used (dis);
2141 dis = build_address (dis);
2142 if (TREE_CODE (stmt) == CALL_EXPR)
2143 CALL_EXPR_FN (stmt) = dis;
2144 else
2145 AGGR_INIT_EXPR_FN (stmt) = dis;
2148 break;
2150 case TARGET_EXPR:
2151 if (TARGET_EXPR_INITIAL (stmt)
2152 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
2153 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
2154 TARGET_EXPR_NO_ELIDE (stmt) = 1;
2155 break;
2157 case TEMPLATE_ID_EXPR:
2158 gcc_assert (concept_check_p (stmt));
2159 /* Emit the value of the concept check. */
2160 *stmt_p = evaluate_concept_check (stmt);
2161 *walk_subtrees = 0;
2162 break;
2164 case OMP_DISTRIBUTE:
2165 /* Need to explicitly instantiate copy ctors on class iterators of
2166 composite distribute parallel for. */
2167 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
2169 tree *data[4] = { NULL, NULL, NULL, NULL };
2170 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
2171 find_combined_omp_for, data, NULL);
2172 if (inner != NULL_TREE
2173 && TREE_CODE (inner) == OMP_FOR)
2175 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
2176 if (TREE_VEC_ELT (OMP_FOR_INIT (inner), i)
2177 && OMP_FOR_ORIG_DECLS (inner)
2178 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
2179 i)) == TREE_LIST
2180 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
2181 i)))
2183 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
2184 /* Class iterators aren't allowed on OMP_SIMD, so the only
2185 case we need to solve is distribute parallel for. */
2186 gcc_assert (TREE_CODE (inner) == OMP_FOR
2187 && data[1]);
2188 tree orig_decl = TREE_PURPOSE (orig);
2189 tree c, cl = NULL_TREE;
2190 for (c = OMP_FOR_CLAUSES (inner);
2191 c; c = OMP_CLAUSE_CHAIN (c))
2192 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
2193 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
2194 && OMP_CLAUSE_DECL (c) == orig_decl)
2196 cl = c;
2197 break;
2199 if (cl == NULL_TREE)
2201 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
2202 c; c = OMP_CLAUSE_CHAIN (c))
2203 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
2204 && OMP_CLAUSE_DECL (c) == orig_decl)
2206 cl = c;
2207 break;
2210 if (cl)
2212 orig_decl = require_complete_type (orig_decl);
2213 tree inner_type = TREE_TYPE (orig_decl);
2214 if (orig_decl == error_mark_node)
2215 continue;
2216 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
2217 inner_type = TREE_TYPE (inner_type);
2219 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2220 inner_type = TREE_TYPE (inner_type);
2221 get_copy_ctor (inner_type, tf_warning_or_error);
2226 /* FALLTHRU */
2228 case FOR_STMT:
2229 case WHILE_STMT:
2230 case DO_STMT:
2231 case SWITCH_STMT:
2232 case CONTINUE_STMT:
2233 case BREAK_STMT:
2234 case OMP_FOR:
2235 case OMP_SIMD:
2236 case OMP_LOOP:
2237 case OMP_TILE:
2238 case OMP_UNROLL:
2239 case OACC_LOOP:
2240 case STATEMENT_LIST:
2241 /* These cases are handled by shared code. */
2242 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2243 cp_genericize_r, cp_walk_subtrees);
2244 break;
2246 case BIT_CAST_EXPR:
2247 *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
2248 TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2249 break;
2251 default:
2252 if (IS_TYPE_OR_DECL_P (stmt))
2253 *walk_subtrees = 0;
2254 break;
2257 p_set->add (*stmt_p);
2259 return NULL;
2262 /* Lower C++ front end trees to GENERIC in T_P. */
2264 static void
2265 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
2267 struct cp_genericize_data wtd;
2269 wtd.p_set = new hash_set<tree>;
2270 wtd.bind_expr_stack.create (0);
2271 wtd.omp_ctx = NULL;
2272 wtd.try_block = NULL_TREE;
2273 wtd.no_sanitize_p = false;
2274 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
2275 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
2276 delete wtd.p_set;
2277 if (sanitize_flags_p (SANITIZE_VPTR))
2278 cp_ubsan_instrument_member_accesses (t_p);
2281 /* If a non-void function that should end with a return doesn't
2282 obviously end with one, add ubsan instrumentation code to verify
2283 it at runtime. If -fsanitize=return is not enabled, instrument
2284 __builtin_unreachable instead. */
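/* E.g. (illustrative):

     int f (bool b) { if (b) return 1; }

   can fall off the end. With -fsanitize=return the fall-through is
   reported at run time with its location; otherwise a
   __builtin_unreachable () (or a trap with -funreachable-traps) is
   appended.  */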
2286 static void
2287 cp_maybe_instrument_return (tree fndecl)
2289 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
2290 || DECL_CONSTRUCTOR_P (fndecl)
2291 || DECL_DESTRUCTOR_P (fndecl)
2292 || !targetm.warn_func_return (fndecl))
2293 return;
2295 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
2296 /* Don't add __builtin_unreachable () if not optimizing; it will not
2297 improve any optimizations in that case, it just breaks code relying on UB.
2298 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
2299 UBSan covers this with ubsan_instrument_return above, where sufficient
2300 location information is provided, while the __builtin_unreachable () below,
2301 emitted when return sanitization is disabled, would just result in a
2302 hard-to-understand runtime error without a location. */
2303 && ((!optimize && !flag_unreachable_traps)
2304 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
2305 return;
2307 tree t = DECL_SAVED_TREE (fndecl);
2308 while (t)
2310 switch (TREE_CODE (t))
2312 case BIND_EXPR:
2313 t = BIND_EXPR_BODY (t);
2314 continue;
2315 case TRY_FINALLY_EXPR:
2316 case CLEANUP_POINT_EXPR:
2317 t = TREE_OPERAND (t, 0);
2318 continue;
2319 case STATEMENT_LIST:
2321 tree_stmt_iterator i = tsi_last (t);
2322 while (!tsi_end_p (i))
2324 tree p = tsi_stmt (i);
2325 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
2326 break;
2327 tsi_prev (&i);
2329 if (!tsi_end_p (i))
2331 t = tsi_stmt (i);
2332 continue;
2335 break;
2336 case RETURN_EXPR:
2337 return;
2338 default:
2339 break;
2341 break;
2343 if (t == NULL_TREE)
2344 return;
2345 tree *p = &DECL_SAVED_TREE (fndecl);
2346 if (TREE_CODE (*p) == BIND_EXPR)
2347 p = &BIND_EXPR_BODY (*p);
2349 location_t loc = DECL_SOURCE_LOCATION (fndecl);
2350 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
2351 t = ubsan_instrument_return (loc);
2352 else
2353 t = build_builtin_unreachable (BUILTINS_LOCATION);
2355 append_to_statement_list (t, p);
2358 void
2359 cp_genericize (tree fndecl)
2361 tree t;
2363 /* Fix up the types of parms passed by invisible reference. */
2364 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
2365 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
2367 /* If a function's arguments are copied to create a thunk,
2368 then DECL_BY_REFERENCE will be set -- but the type of the
2369 argument will be a pointer type, so we will never get
2370 here. */
2371 gcc_assert (!DECL_BY_REFERENCE (t));
2372 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
2373 TREE_TYPE (t) = DECL_ARG_TYPE (t);
2374 DECL_BY_REFERENCE (t) = 1;
2375 TREE_ADDRESSABLE (t) = 0;
2376 relayout_decl (t);
2379 /* Do the same for the return value. */
2380 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
2382 t = DECL_RESULT (fndecl);
2383 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
2384 DECL_BY_REFERENCE (t) = 1;
2385 TREE_ADDRESSABLE (t) = 0;
2386 relayout_decl (t);
2387 if (DECL_NAME (t))
2389 /* Adjust DECL_VALUE_EXPR of the original var. */
2390 tree outer = outer_curly_brace_block (current_function_decl);
2391 tree var;
2393 if (outer)
2394 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2395 if (VAR_P (var)
2396 && DECL_NAME (t) == DECL_NAME (var)
2397 && DECL_HAS_VALUE_EXPR_P (var)
2398 && DECL_VALUE_EXPR (var) == t)
2400 tree val = convert_from_reference (t);
2401 SET_DECL_VALUE_EXPR (var, val);
2402 break;
2407 /* If we're a clone, the body is already GIMPLE. */
2408 if (DECL_CLONED_FUNCTION_P (fndecl))
2409 return;
2411 /* Allow cp_genericize calls to be nested. */
2412 bc_state_t save_state;
2413 save_bc_state (&save_state);
2415 /* We do want to see every occurrence of the parms, so we can't just use
2416 walk_tree's hash functionality. */
2417 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
2419 cp_maybe_instrument_return (fndecl);
2421 /* Do everything else. */
2422 c_genericize (fndecl);
2423 restore_bc_state (&save_state);
2426 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2427 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
2428 actually only takes one argument. */
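/* For array operands the generated code is roughly (a sketch, with
   illustrative names):

     p1 = &arg1[0]...[0];
     end1 = p1 + sizeof (arg1);
   again:
     fn (p1, p2, <defaulted args>);
     p1 = p1 + sizeof (element);
     p2 = p2 + sizeof (element);
     if (p1 != end1) goto again;

   For non-array operands a single call fn (&arg1, &arg2, ...) is
   built instead.  */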
2430 static tree
2431 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
2433 tree defparm, parm, t;
2434 int i = 0;
2435 int nargs;
2436 tree *argarray;
2438 if (fn == NULL)
2439 return NULL;
2441 nargs = list_length (DECL_ARGUMENTS (fn));
2442 argarray = XALLOCAVEC (tree, nargs);
2444 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
2445 if (arg2)
2446 defparm = TREE_CHAIN (defparm);
2448 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
2449 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
2451 tree inner_type = TREE_TYPE (arg1);
2452 tree start1, end1, p1;
2453 tree start2 = NULL, p2 = NULL;
2454 tree ret = NULL, lab;
2456 start1 = arg1;
2457 start2 = arg2;
2460 inner_type = TREE_TYPE (inner_type);
2461 start1 = build4 (ARRAY_REF, inner_type, start1,
2462 size_zero_node, NULL, NULL);
2463 if (arg2)
2464 start2 = build4 (ARRAY_REF, inner_type, start2,
2465 size_zero_node, NULL, NULL);
2467 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2468 start1 = build_fold_addr_expr_loc (input_location, start1);
2469 if (arg2)
2470 start2 = build_fold_addr_expr_loc (input_location, start2);
2472 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2473 end1 = fold_build_pointer_plus (start1, end1);
2475 p1 = create_tmp_var (TREE_TYPE (start1));
2476 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2477 append_to_statement_list (t, &ret);
2479 if (arg2)
2481 p2 = create_tmp_var (TREE_TYPE (start2));
2482 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2483 append_to_statement_list (t, &ret);
2486 lab = create_artificial_label (input_location);
2487 t = build1 (LABEL_EXPR, void_type_node, lab);
2488 append_to_statement_list (t, &ret);
2490 argarray[i++] = p1;
2491 if (arg2)
2492 argarray[i++] = p2;
2493 /* Handle default arguments. */
2494 for (parm = defparm; parm && parm != void_list_node;
2495 parm = TREE_CHAIN (parm), i++)
2496 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2497 TREE_PURPOSE (parm), fn,
2498 i - is_method, tf_warning_or_error);
2499 t = build_call_a (fn, i, argarray);
2500 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
2501 t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
2502 t = fold_convert (void_type_node, t);
2503 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2504 append_to_statement_list (t, &ret);
2506 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2507 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2508 append_to_statement_list (t, &ret);
2510 if (arg2)
2512 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2513 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2514 append_to_statement_list (t, &ret);
2517 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2518 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2519 append_to_statement_list (t, &ret);
2521 return ret;
2523 else
2525 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2526 if (arg2)
2527 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2528 /* Handle default arguments. */
2529 for (parm = defparm; parm && parm != void_list_node;
2530 parm = TREE_CHAIN (parm), i++)
2531 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2532 TREE_PURPOSE (parm), fn,
2533 i - is_method, tf_warning_or_error);
2534 t = build_call_a (fn, i, argarray);
2535 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
2536 t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
2537 t = fold_convert (void_type_node, t);
2538 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2542 /* Return code to initialize DECL with its default constructor, or
2543 NULL if there's nothing to do. */
2545 tree
2546 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2548 tree info = CP_OMP_CLAUSE_INFO (clause);
2549 tree ret = NULL;
2551 if (info)
2552 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2554 return ret;
2557 /* Return code to initialize DST with a copy constructor from SRC. */
2559 tree
2560 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2562 tree info = CP_OMP_CLAUSE_INFO (clause);
2563 tree ret = NULL;
2565 if (info)
2566 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2567 if (ret == NULL)
2568 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2570 return ret;
2573 /* Similarly, except use an assignment operator instead. */
2575 tree
2576 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2578 tree info = CP_OMP_CLAUSE_INFO (clause);
2579 tree ret = NULL;
2581 if (info)
2582 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2583 if (ret == NULL)
2584 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2586 return ret;
2589 /* Return code to destroy DECL. */
2591 tree
2592 cxx_omp_clause_dtor (tree clause, tree decl)
2594 tree info = CP_OMP_CLAUSE_INFO (clause);
2595 tree ret = NULL;
2597 if (info)
2598 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2600 return ret;
2603 /* True if OpenMP should privatize what this DECL points to rather
2604 than the DECL itself. */
2606 bool
2607 cxx_omp_privatize_by_reference (const_tree decl)
2609 return (TYPE_REF_P (TREE_TYPE (decl))
2610 || is_invisiref_parm (decl));
2613 /* Return true if DECL is a const-qualified variable having no mutable member. */
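/* E.g. (illustrative): const int c = 1; qualifies, whereas with

     struct S { mutable int m; };
     const S s;

   it does not, because s.m can still be modified.  */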
2614 bool
2615 cxx_omp_const_qual_no_mutable (tree decl)
2617 tree type = TREE_TYPE (decl);
2618 if (TYPE_REF_P (type))
2620 if (!is_invisiref_parm (decl))
2621 return false;
2622 type = TREE_TYPE (type);
2624 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2626 /* NVR doesn't preserve const qualification of the
2627 variable's type. */
2628 tree outer = outer_curly_brace_block (current_function_decl);
2629 tree var;
2631 if (outer)
2632 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2633 if (VAR_P (var)
2634 && DECL_NAME (decl) == DECL_NAME (var)
2635 && (TYPE_MAIN_VARIANT (type)
2636 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2638 if (TYPE_READONLY (TREE_TYPE (var)))
2639 type = TREE_TYPE (var);
2640 break;
2645 if (type == error_mark_node)
2646 return false;
2648 /* Variables with const-qualified type having no mutable member
2649 are predetermined shared. */
2650 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2651 return true;
2653 return false;
2656 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2657 of DECL is predetermined. */
2659 enum omp_clause_default_kind
2660 cxx_omp_predetermined_sharing_1 (tree decl)
2662 /* Static data members are predetermined shared. */
2663 if (TREE_STATIC (decl))
2665 tree ctx = CP_DECL_CONTEXT (decl);
2666 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2667 return OMP_CLAUSE_DEFAULT_SHARED;
2669 if (c_omp_predefined_variable (decl))
2670 return OMP_CLAUSE_DEFAULT_SHARED;
2673 /* `this' may not be specified in data-sharing clauses; still, we need
2674 to predetermine it as firstprivate. */
2675 if (decl == current_class_ptr)
2676 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2678 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2681 /* Likewise, but also include the artificial vars. We don't want to
2682 disallow the artificial vars being mentioned in explicit clauses,
2683 as we use artificial vars e.g. for loop constructs with random
2684 access iterators other than pointers, but during gimplification
2685 we want to treat them as predetermined. */
2687 enum omp_clause_default_kind
2688 cxx_omp_predetermined_sharing (tree decl)
2690 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2691 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2692 return ret;
2694 /* Predetermine artificial variables holding integral values; those
2695 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2696 gimplification. */
2697 if (VAR_P (decl)
2698 && DECL_ARTIFICIAL (decl)
2699 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2700 && !(DECL_LANG_SPECIFIC (decl)
2701 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2702 return OMP_CLAUSE_DEFAULT_SHARED;
2704 /* Similarly for typeinfo symbols. */
2705 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2706 return OMP_CLAUSE_DEFAULT_SHARED;
2708 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2711 enum omp_clause_defaultmap_kind
2712 cxx_omp_predetermined_mapping (tree decl)
2714 /* Predetermine artificial variables holding integral values; those
2715 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2716 gimplification. */
2717 if (VAR_P (decl)
2718 && DECL_ARTIFICIAL (decl)
2719 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2720 && !(DECL_LANG_SPECIFIC (decl)
2721 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2722 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2724 if (c_omp_predefined_variable (decl))
2725 return OMP_CLAUSE_DEFAULTMAP_TO;
2727 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2730 /* Finalize an implicitly determined clause. */
2732 void
2733 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2735 tree decl, inner_type;
2736 bool make_shared = false;
2738 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2739 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2740 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2741 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2742 return;
2744 decl = OMP_CLAUSE_DECL (c);
2745 decl = require_complete_type (decl);
2746 inner_type = TREE_TYPE (decl);
2747 if (decl == error_mark_node)
2748 make_shared = true;
2749 else if (TYPE_REF_P (TREE_TYPE (decl)))
2750 inner_type = TREE_TYPE (inner_type);
2752 /* We're interested in the base element, not arrays. */
2753 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2754 inner_type = TREE_TYPE (inner_type);
2756 /* Check for special function availability by building a call to one.
2757 Save the results, because later we won't be in the right context
2758 for making these queries. */
2759 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2760 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2761 if (!make_shared
2762 && CLASS_TYPE_P (inner_type)
2763 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2764 true))
2765 make_shared = true;
2767 if (make_shared)
2769 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2770 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2771 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2775 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2776 disregarded in an OpenMP construct, because it is going to be
2777 remapped during OpenMP lowering. SHARED is true if DECL
2778 is going to be shared, false if it is going to be privatized. */
2780 bool
2781 cxx_omp_disregard_value_expr (tree decl, bool shared)
2783 if (shared)
2784 return false;
2785 if (VAR_P (decl)
2786 && DECL_HAS_VALUE_EXPR_P (decl)
2787 && DECL_ARTIFICIAL (decl)
2788 && DECL_LANG_SPECIFIC (decl)
2789 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2790 return true;
2791 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2792 return true;
2793 return false;
2796 /* Fold expression X which is used as an rvalue if RVAL is true. */
2798 static tree
2799 cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
2801 while (true)
2803 x = cp_fold (x, flags);
2804 if (rval)
2805 x = mark_rvalue_use (x);
2806 if (rval && DECL_P (x)
2807 && !TYPE_REF_P (TREE_TYPE (x)))
2809 tree v = decl_constant_value (x);
2810 if (v != x && v != error_mark_node)
2812 x = v;
2813 continue;
2816 break;
2818 return x;
2821 tree
2822 cp_fold_maybe_rvalue (tree x, bool rval)
2824 return cp_fold_maybe_rvalue (x, rval, ff_none);
2827 /* Fold expression X which is used as an rvalue. */
2829 static tree
2830 cp_fold_rvalue (tree x, fold_flags_t flags)
2832 return cp_fold_maybe_rvalue (x, true, flags);
2835 tree
2836 cp_fold_rvalue (tree x)
2838 return cp_fold_rvalue (x, ff_none);
2841 /* Perform folding on expression X. */
2843 static tree
2844 cp_fully_fold (tree x, mce_value manifestly_const_eval)
2846 if (processing_template_decl)
2847 return x;
2848 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2849 have to call both. */
2850 if (cxx_dialect >= cxx11)
2852 x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
2853 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2854 a TARGET_EXPR; undo that here. */
2855 if (TREE_CODE (x) == TARGET_EXPR)
2856 x = TARGET_EXPR_INITIAL (x);
2857 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2858 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2859 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2860 x = TREE_OPERAND (x, 0);
2862 fold_flags_t flags = ff_none;
2863 if (manifestly_const_eval == mce_false)
2864 flags |= ff_mce_false;
2865 return cp_fold_rvalue (x, flags);
2868 tree
2869 cp_fully_fold (tree x)
2871 return cp_fully_fold (x, mce_unknown);
2874 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2875 in some cases. */
2877 tree
2878 cp_fully_fold_init (tree x)
2880 if (processing_template_decl)
2881 return x;
2882 x = cp_fully_fold (x, mce_false);
2883 cp_fold_data data (ff_mce_false);
2884 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2885 return x;
2888 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2889 and certain changes are made to the folding done. Or should be (FIXME). We
2890 never touch maybe_const, as it is only used for the C front-end
2891 C_MAYBE_CONST_EXPR. */
2893 tree
2894 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2896 return cp_fold_maybe_rvalue (x, !lval);
2899 static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
2901 /* Subroutine of cp_fold. Returns which fold cache to use according
2902 to the given flags. We need multiple caches since the result of
2903 folding may depend on which flags are used. */
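/* E.g. __builtin_is_constant_evaluated () folds to false only when
   ff_mce_false is set, so a result cached with that flag must not
   be reused for a fold without it.  */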
2905 static hash_map<tree, tree> *&
2906 get_fold_cache (fold_flags_t flags)
2908 if (flags & ff_mce_false)
2909 return fold_caches[1];
2910 else
2911 return fold_caches[0];
2914 /* Dispose of the whole FOLD_CACHE. */
2916 void
2917 clear_fold_cache (void)
2919 for (auto& fold_cache : fold_caches)
2920 if (fold_cache != NULL)
2921 fold_cache->empty ();
2924 /* This function tries to fold an expression X.
2925 To avoid combinatorial explosion, folding results are kept in fold_cache.
2926 If X is invalid, we don't fold at all.
2927 For performance reasons we don't cache expressions representing a
2928 declaration or constant.
2929 The function returns X or its folded variant. */
2931 static tree
2932 cp_fold (tree x, fold_flags_t flags)
2934 tree op0, op1, op2, op3;
2935 tree org_x = x, r = NULL_TREE;
2936 enum tree_code code;
2937 location_t loc;
2938 bool rval_ops = true;
2940 if (!x || x == error_mark_node)
2941 return x;
2943 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2944 return x;
2946 /* Don't bother to cache DECLs or constants. */
2947 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2948 return x;
2950 auto& fold_cache = get_fold_cache (flags);
2951 if (fold_cache == NULL)
2952 fold_cache = hash_map<tree, tree>::create_ggc (101);
2954 if (tree *cached = fold_cache->get (x))
2956 /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
2957 argument has been folded into a tree invariant, make sure it is
2958 unshared. See PR112727. */
2959 if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
2960 return unshare_expr (*cached);
2961 return *cached;
2964 uid_sensitive_constexpr_evaluation_checker c;
2966 code = TREE_CODE (x);
2967 switch (code)
2969 case CLEANUP_POINT_EXPR:
2970 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2971 effects. */
2972 r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2973 if (!TREE_SIDE_EFFECTS (r))
2974 x = r;
2975 break;
2977 case SIZEOF_EXPR:
2978 x = fold_sizeof_expr (x);
2979 break;
2981 case VIEW_CONVERT_EXPR:
2982 rval_ops = false;
2983 /* FALLTHRU */
2984 case NON_LVALUE_EXPR:
2985 CASE_CONVERT:
2987 if (VOID_TYPE_P (TREE_TYPE (x)))
2989 /* This is just to make sure we don't end up with casts to
2990 void from error_mark_node. If we just return x, then
2991 cp_fold_r might fold the operand into error_mark_node and
2992 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2993 during gimplification doesn't like such casts.
2994 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2995 folded operand should be in the caches, and if folded in cp_fold_r
2996 it will be modified in place. */
2997 op0 = cp_fold (TREE_OPERAND (x, 0), flags);
2998 if (op0 == error_mark_node)
2999 x = error_mark_node;
3000 break;
3003 loc = EXPR_LOCATION (x);
3004 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3006 if (code == CONVERT_EXPR
3007 && SCALAR_TYPE_P (TREE_TYPE (x))
3008 && op0 != void_node)
3009 /* During parsing we used convert_to_*_nofold; re-convert now using the
3010 folding variants, since fold() doesn't do those transformations. */
3011 x = fold (convert (TREE_TYPE (x), op0));
3012 else if (op0 != TREE_OPERAND (x, 0))
3014 if (op0 == error_mark_node)
3015 x = error_mark_node;
3016 else
3017 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
3019 else
3020 x = fold (x);
3022 /* Conversion of an out-of-range value has implementation-defined
3023 behavior; the language considers it different from arithmetic
3024 overflow, which is undefined. */
3025 if (TREE_CODE (op0) == INTEGER_CST
3026 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
3027 TREE_OVERFLOW (x) = false;
3029 break;
3031 case EXCESS_PRECISION_EXPR:
3032 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3033 x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
3034 break;
3036 case INDIRECT_REF:
3037 /* We don't need the decltype(auto) obfuscation anymore. */
3038 if (REF_PARENTHESIZED_P (x))
3040 tree p = maybe_undo_parenthesized_ref (x);
3041 if (p != x)
3042 return cp_fold (p, flags);
3044 goto unary;
3046 case ADDR_EXPR:
3047 loc = EXPR_LOCATION (x);
3048 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
3050 /* Cope with user tricks that amount to offsetof. */
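/* E.g. (illustrative): &((S *) 0)->m folds to the constant byte
   offset of m within S via fold_offsetof.  */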
3051 if (op0 != error_mark_node
3052 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
3054 tree val = get_base_address (op0);
3055 if (val
3056 && INDIRECT_REF_P (val)
3057 && COMPLETE_TYPE_P (TREE_TYPE (val))
3058 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
3060 val = TREE_OPERAND (val, 0);
3061 STRIP_NOPS (val);
3062 val = maybe_constant_value (val);
3063 if (TREE_CODE (val) == INTEGER_CST)
3064 return fold_offsetof (op0, TREE_TYPE (x));
3067 goto finish_unary;
3069 case REALPART_EXPR:
3070 case IMAGPART_EXPR:
3071 rval_ops = false;
3072 /* FALLTHRU */
3073 case CONJ_EXPR:
3074 case FIX_TRUNC_EXPR:
3075 case FLOAT_EXPR:
3076 case NEGATE_EXPR:
3077 case ABS_EXPR:
3078 case ABSU_EXPR:
3079 case BIT_NOT_EXPR:
3080 case TRUTH_NOT_EXPR:
3081 case FIXED_CONVERT_EXPR:
3082 unary:
3084 loc = EXPR_LOCATION (x);
3085 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3087 finish_unary:
3088 if (op0 != TREE_OPERAND (x, 0))
3090 if (op0 == error_mark_node)
3091 x = error_mark_node;
3092 else
3094 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
3095 if (code == INDIRECT_REF
3096 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
3098 TREE_READONLY (x) = TREE_READONLY (org_x);
3099 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3100 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3104 else
3105 x = fold (x);
3107 gcc_assert (TREE_CODE (x) != COND_EXPR
3108 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
3109 break;
3111 case UNARY_PLUS_EXPR:
3112 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
3113 if (op0 == error_mark_node)
3114 x = error_mark_node;
3115 else
3116 x = fold_convert (TREE_TYPE (x), op0);
3117 break;
3119 case POSTDECREMENT_EXPR:
3120 case POSTINCREMENT_EXPR:
3121 case INIT_EXPR:
3122 case PREDECREMENT_EXPR:
3123 case PREINCREMENT_EXPR:
3124 case COMPOUND_EXPR:
3125 case MODIFY_EXPR:
3126 rval_ops = false;
3127 /* FALLTHRU */
3128 case POINTER_PLUS_EXPR:
3129 case PLUS_EXPR:
3130 case POINTER_DIFF_EXPR:
3131 case MINUS_EXPR:
3132 case MULT_EXPR:
3133 case TRUNC_DIV_EXPR:
3134 case CEIL_DIV_EXPR:
3135 case FLOOR_DIV_EXPR:
3136 case ROUND_DIV_EXPR:
3137 case TRUNC_MOD_EXPR:
3138 case CEIL_MOD_EXPR:
3139 case ROUND_MOD_EXPR:
3140 case RDIV_EXPR:
3141 case EXACT_DIV_EXPR:
3142 case MIN_EXPR:
3143 case MAX_EXPR:
3144 case LSHIFT_EXPR:
3145 case RSHIFT_EXPR:
3146 case LROTATE_EXPR:
3147 case RROTATE_EXPR:
3148 case BIT_AND_EXPR:
3149 case BIT_IOR_EXPR:
3150 case BIT_XOR_EXPR:
3151 case TRUTH_AND_EXPR:
3152 case TRUTH_ANDIF_EXPR:
3153 case TRUTH_OR_EXPR:
3154 case TRUTH_ORIF_EXPR:
3155 case TRUTH_XOR_EXPR:
3156 case LT_EXPR: case LE_EXPR:
3157 case GT_EXPR: case GE_EXPR:
3158 case EQ_EXPR: case NE_EXPR:
3159 case UNORDERED_EXPR: case ORDERED_EXPR:
3160 case UNLT_EXPR: case UNLE_EXPR:
3161 case UNGT_EXPR: case UNGE_EXPR:
3162 case UNEQ_EXPR: case LTGT_EXPR:
3163 case RANGE_EXPR: case COMPLEX_EXPR:
3165 loc = EXPR_LOCATION (x);
3166 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3167 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
3169 /* decltype(nullptr) has only one value, so optimize away all comparisons
3170 with that type right away; keeping them in the IL causes trouble for
3171 various optimizations. */
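/* E.g. (illustrative): with decltype (nullptr) p, q; the comparison
   p == q folds to true here; any side effects of the operands are
   preserved via omit_two_operands_loc below.  */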
3172 if (COMPARISON_CLASS_P (org_x)
3173 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
3174 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
3176 switch (code)
3178 case EQ_EXPR:
3179 x = constant_boolean_node (true, TREE_TYPE (x));
3180 break;
3181 case NE_EXPR:
3182 x = constant_boolean_node (false, TREE_TYPE (x));
3183 break;
3184 default:
3185 gcc_unreachable ();
3187 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
3188 op0, op1);
3191 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
3193 if (op0 == error_mark_node || op1 == error_mark_node)
3194 x = error_mark_node;
3195 else
3196 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
3198 else
3199 x = fold (x);
3201 /* This is only needed for -Wnonnull-compare and only if
3202 TREE_NO_WARNING (org_x), but to avoid that option affecting code
3203 generation, we do it unconditionally. */
3204 if (COMPARISON_CLASS_P (org_x))
3206 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
3208 else if (COMPARISON_CLASS_P (x))
3210 if (warn_nonnull_compare
3211 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
3212 suppress_warning (x, OPT_Wnonnull_compare);
3214 /* Otherwise give up on optimizing these; let the GIMPLE folders
3215 optimize them later on. */
3216 else if (op0 != TREE_OPERAND (org_x, 0)
3217 || op1 != TREE_OPERAND (org_x, 1))
3219 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
3220 if (warn_nonnull_compare
3221 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
3222 suppress_warning (x, OPT_Wnonnull_compare);
3224 else
3225 x = org_x;
3228 break;
3230 case VEC_COND_EXPR:
3231 case COND_EXPR:
3232 loc = EXPR_LOCATION (x);
3233 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
3234 op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3235 op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3237 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
3239 warning_sentinel s (warn_int_in_bool_context);
3240 if (!VOID_TYPE_P (TREE_TYPE (op1)))
3241 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
3242 if (!VOID_TYPE_P (TREE_TYPE (op2)))
3243 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
3245 else if (VOID_TYPE_P (TREE_TYPE (x)))
3247 if (TREE_CODE (op0) == INTEGER_CST)
3249 /* If the condition is constant, fold can fold away
3250 the COND_EXPR. Since some statement-level uses of COND_EXPR
3251 have one of the branches NULL, avoid a folding crash. */
3252 if (!op1)
3253 op1 = build_empty_stmt (loc);
3254 if (!op2)
3255 op2 = build_empty_stmt (loc);
3257 else
3259 /* Otherwise, don't bother folding a void condition, since
3260 it can't produce a constant value. */
3261 if (op0 != TREE_OPERAND (x, 0)
3262 || op1 != TREE_OPERAND (x, 1)
3263 || op2 != TREE_OPERAND (x, 2))
3264 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3265 break;
3269 if (op0 != TREE_OPERAND (x, 0)
3270 || op1 != TREE_OPERAND (x, 1)
3271 || op2 != TREE_OPERAND (x, 2))
3273 if (op0 == error_mark_node
3274 || op1 == error_mark_node
3275 || op2 == error_mark_node)
3276 x = error_mark_node;
3277 else
3278 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3280 else
3281 x = fold (x);
3283 /* A COND_EXPR might have incompatible types in branches if one or both
3284 arms are bitfields. If folding exposed such a branch, fix it up. */
3285 if (TREE_CODE (x) != code
3286 && x != error_mark_node
3287 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
3288 x = fold_convert (TREE_TYPE (org_x), x);
3290 break;
3292 case CALL_EXPR:
3294 tree callee = get_callee_fndecl (x);
3296 /* "Inline" calls to std::move/forward and other cast-like functions
3297 by simply folding them into a corresponding cast to their return
3298 type. This is cheaper than relying on the middle end to do so, and
3299 also means we avoid generating useless debug info for them at all.
3301 At this point the argument has already been converted into a
3302 reference, so it suffices to use a NOP_EXPR to express the
3303 cast. */
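/* E.g. (illustrative): std::move (x) with int x folds to the
   equivalent of static_cast<int &&> (x), a plain NOP_EXPR, leaving
   no CALL_EXPR in the IL.  */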
3304 if ((OPTION_SET_P (flag_fold_simple_inlines)
3305 ? flag_fold_simple_inlines
3306 : !flag_no_inline)
3307 && call_expr_nargs (x) == 1
3308 && decl_in_std_namespace_p (callee)
3309 && DECL_NAME (callee) != NULL_TREE
3310 && (id_equal (DECL_NAME (callee), "move")
3311 || id_equal (DECL_NAME (callee), "forward")
3312 || id_equal (DECL_NAME (callee), "addressof")
3313 /* This addressof equivalent is used heavily in libstdc++. */
3314 || id_equal (DECL_NAME (callee), "__addressof")
3315 || id_equal (DECL_NAME (callee), "as_const")))
3317 r = CALL_EXPR_ARG (x, 0);
3318 /* Check that the return and argument types are sane before
3319 folding. */
3320 if (INDIRECT_TYPE_P (TREE_TYPE (x))
3321 && INDIRECT_TYPE_P (TREE_TYPE (r)))
3323 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
3324 r = build_nop (TREE_TYPE (x), r);
3325 x = cp_fold (r, flags);
3326 break;
3330 int sv = optimize, nw = sv;
3332 /* Some built-in function calls will be evaluated at compile-time in
3333 fold (). Set optimize to 1 when folding __builtin_constant_p inside
3334 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3335 if (callee && fndecl_built_in_p (callee) && !optimize
3336 && DECL_IS_BUILTIN_CONSTANT_P (callee)
3337 && current_function_decl
3338 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
3339 nw = 1;
3341 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
3343 iloc_sentinel ils (EXPR_LOCATION (x));
3344 switch (DECL_FE_FUNCTION_CODE (callee))
3346 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
3347 /* Defer folding __builtin_is_constant_evaluated unless
3348 we know this isn't a manifestly constant-evaluated
3349 context. */
3350 if (flags & ff_mce_false)
3351 x = boolean_false_node;
3352 break;
3353 case CP_BUILT_IN_SOURCE_LOCATION:
3354 x = fold_builtin_source_location (x);
3355 break;
3356 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
3357 x = fold_builtin_is_corresponding_member
3358 (EXPR_LOCATION (x), call_expr_nargs (x),
3359 &CALL_EXPR_ARG (x, 0));
3360 break;
3361 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
3362 x = fold_builtin_is_pointer_inverconvertible_with_class
3363 (EXPR_LOCATION (x), call_expr_nargs (x),
3364 &CALL_EXPR_ARG (x, 0));
3365 break;
3366 default:
3367 break;
3369 break;
3372 if (callee
3373 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
3374 BUILT_IN_FRONTEND))
3376 x = fold_builtin_source_location (x);
3377 break;
3380 bool changed = false;
3381 int m = call_expr_nargs (x);
3382 for (int i = 0; i < m; i++)
3384 r = cp_fold (CALL_EXPR_ARG (x, i), flags);
3385 if (r != CALL_EXPR_ARG (x, i))
3387 if (r == error_mark_node)
3389 x = error_mark_node;
3390 break;
3392 if (!changed)
3393 x = copy_node (x);
3394 CALL_EXPR_ARG (x, i) = r;
3395 changed = true;
3398 if (x == error_mark_node)
3399 break;
3401 optimize = nw;
3402 r = fold (x);
3403 optimize = sv;
3405 if (TREE_CODE (r) != CALL_EXPR)
3407 x = cp_fold (r, flags);
3408 break;
3411 optimize = nw;
3413 /* Invoke maybe_constant_value for functions declared
3414 constexpr and not called with AGGR_INIT_EXPRs.
3415 TODO:
3416 Do constexpr expansion of expressions where the call itself is not
3417 constant, but the call followed by an INDIRECT_REF is. */
3418 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3419 && !flag_no_inline)
3421 mce_value manifestly_const_eval = mce_unknown;
3422 if (flags & ff_mce_false)
3423 /* Allow folding __builtin_is_constant_evaluated to false during
3424 constexpr evaluation of this call. */
3425 manifestly_const_eval = mce_false;
3426 r = maybe_constant_value (x, /*decl=*/NULL_TREE,
3427 manifestly_const_eval);
3429 optimize = sv;
3431 if (TREE_CODE (r) != CALL_EXPR)
3433 if (DECL_CONSTRUCTOR_P (callee))
3435 loc = EXPR_LOCATION (x);
3436 tree a = CALL_EXPR_ARG (x, 0);
3437 bool return_this = targetm.cxx.cdtor_returns_this ();
3438 if (return_this)
3439 a = cp_save_expr (a);
3440 tree s = build_fold_indirect_ref_loc (loc, a);
3441 r = cp_build_init_expr (s, r);
3442 if (return_this)
3443 r = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (x), r,
3444 fold_convert_loc (loc, TREE_TYPE (x), a));
3446 x = r;
3447 break;
3450 break;
3453 case CONSTRUCTOR:
3455 unsigned i;
3456 constructor_elt *p;
3457 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3458 vec<constructor_elt, va_gc> *nelts = NULL;
3459 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3461 tree op = cp_fold (p->value, flags);
3462 if (op != p->value)
3464 if (op == error_mark_node)
3466 x = error_mark_node;
3467 vec_free (nelts);
3468 break;
3470 if (nelts == NULL)
3471 nelts = elts->copy ();
3472 (*nelts)[i].value = op;
3475 if (nelts)
3477 x = build_constructor (TREE_TYPE (x), nelts);
3478 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3479 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3480 CONSTRUCTOR_MUTABLE_POISON (x)
3481 = CONSTRUCTOR_MUTABLE_POISON (org_x);
3483 if (VECTOR_TYPE_P (TREE_TYPE (x)))
3484 x = fold (x);
3485 break;
3487 case TREE_VEC:
3489 bool changed = false;
3490 int n = TREE_VEC_LENGTH (x);
3492 for (int i = 0; i < n; i++)
3494 tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
3495 if (op != TREE_VEC_ELT (x, i))
3497 if (!changed)
3498 x = copy_node (x);
3499 TREE_VEC_ELT (x, i) = op;
3500 changed = true;
3505 break;
3507 case ARRAY_REF:
3508 case ARRAY_RANGE_REF:
3510 loc = EXPR_LOCATION (x);
3511 op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3512 op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3513 op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3514 op3 = cp_fold (TREE_OPERAND (x, 3), flags);
3516 if (op0 != TREE_OPERAND (x, 0)
3517 || op1 != TREE_OPERAND (x, 1)
3518 || op2 != TREE_OPERAND (x, 2)
3519 || op3 != TREE_OPERAND (x, 3))
3521 if (op0 == error_mark_node
3522 || op1 == error_mark_node
3523 || op2 == error_mark_node
3524 || op3 == error_mark_node)
3525 x = error_mark_node;
3526 else
3528 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3529 TREE_READONLY (x) = TREE_READONLY (org_x);
3530 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3531 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3535 x = fold (x);
3536 break;
3538 case SAVE_EXPR:
3539 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3540 folding, evaluates to an invariant. In that case there is no need
3541 to wrap the folded tree in a SAVE_EXPR. */
3542 r = cp_fold (TREE_OPERAND (x, 0), flags);
3543 if (tree_invariant_p (r))
3544 x = r;
3545 break;
3547 case REQUIRES_EXPR:
3548 x = evaluate_requires_expr (x);
3549 break;
3551 default:
3552 return org_x;
3555 if (EXPR_P (x) && TREE_CODE (x) == code)
3557 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3558 copy_warning (x, org_x);
3561 if (!c.evaluation_restricted_p ())
3563 fold_cache->put (org_x, x);
3564 /* Make sure we don't try to fold an already folded result again. */
3565 if (x != org_x)
3566 fold_cache->put (x, x);
3569 return x;
3572 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3574 tree
3575 lookup_hotness_attribute (tree list)
3577 for (; list; list = TREE_CHAIN (list))
3579 tree name = get_attribute_name (list);
3580 if ((is_attribute_p ("hot", name)
3581 || is_attribute_p ("cold", name)
3582 || is_attribute_p ("likely", name)
3583 || is_attribute_p ("unlikely", name))
3584 && is_attribute_namespace_p ("", list))
3585 break;
3587 return list;
3590 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3592 static tree
3593 remove_hotness_attribute (tree list)
3595 for (tree *p = &list; *p; )
3597 tree l = *p;
3598 tree name = get_attribute_name (l);
3599 if ((is_attribute_p ("hot", name)
3600 || is_attribute_p ("cold", name)
3601 || is_attribute_p ("likely", name)
3602 || is_attribute_p ("unlikely", name))
3603 && is_attribute_namespace_p ("", l))
3605 *p = TREE_CHAIN (l);
3606 continue;
3608 p = &TREE_CHAIN (l);
3610 return list;
3613 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3614 PREDICT_EXPR. */
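/* E.g. (illustrative):

     if (x) [[likely]] f ();

   attaches a PREDICT_EXPR built from PRED_HOT_LABEL / TAKEN in
   front of the guarded statement.  */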
3616 tree
3617 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3619 if (std_attrs == error_mark_node)
3620 return std_attrs;
3621 if (tree attr = lookup_hotness_attribute (std_attrs))
3623 tree name = get_attribute_name (attr);
3624 bool hot = (is_attribute_p ("hot", name)
3625 || is_attribute_p ("likely", name));
3626 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3627 hot ? TAKEN : NOT_TAKEN);
3628 SET_EXPR_LOCATION (pred, attrs_loc);
3629 add_stmt (pred);
3630 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3631 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3632 get_attribute_name (other), name);
3633 std_attrs = remove_hotness_attribute (std_attrs);
3635 return std_attrs;
3638 /* Build IFN_ASSUME internal call for assume condition ARG. */
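/* E.g. (illustrative): [[assume (i > 0)]]; becomes a call to the
   internal function IFN_ASSUME with the condition as its sole
   argument, wrapped in a cleanup point outside of templates.  */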
3640 tree
3641 build_assume_call (location_t loc, tree arg)
3643 if (!processing_template_decl)
3644 arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
3645 return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
3646 1, arg);
3649 /* If [[assume (cond)]] appears on this statement, handle it. */
3651 tree
3652 process_stmt_assume_attribute (tree std_attrs, tree statement,
3653 location_t attrs_loc)
3655 if (std_attrs == error_mark_node)
3656 return std_attrs;
3657 tree attr = lookup_attribute ("gnu", "assume", std_attrs);
3658 if (!attr)
3659 return std_attrs;
3660 /* The next token after the assume attribute is not ';'. */
3661 if (statement)
3663 warning_at (attrs_loc, OPT_Wattributes,
3664 "%<assume%> attribute not followed by %<;%>");
3665 attr = NULL_TREE;
3667 for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
3669 tree args = TREE_VALUE (attr);
3670 if (args && PACK_EXPANSION_P (args))
3672 auto_diagnostic_group d;
3673 error_at (attrs_loc, "pack expansion of %qE attribute",
3674 get_attribute_name (attr));
3675 if (cxx_dialect >= cxx17)
3676 inform (attrs_loc, "use fold expression in the attribute "
3677 "argument instead");
3678 continue;
3680 int nargs = list_length (args);
3681 if (nargs != 1)
3683 auto_diagnostic_group d;
3684 error_at (attrs_loc, "wrong number of arguments specified for "
3685 "%qE attribute", get_attribute_name (attr));
3686 inform (attrs_loc, "expected %i, found %i", 1, nargs);
3688 else
3690 tree arg = TREE_VALUE (args);
3691 if (!type_dependent_expression_p (arg))
3692 arg = contextual_conv_bool (arg, tf_warning_or_error);
3693 if (error_operand_p (arg))
3694 continue;
3695 finish_expr_stmt (build_assume_call (attrs_loc, arg));
3698 return remove_attribute ("gnu", "assume", std_attrs);
3701 /* Return the type std::source_location::__impl after performing
3702 verification on it. */
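/* The layout verified below is roughly (a sketch of the libstdc++
   type, member types may vary):

     struct __impl {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line;
       unsigned _M_column;
     };

   i.e. exactly those four non-static data members.  */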
3704 tree
3705 get_source_location_impl_type ()
3707 tree name = get_identifier ("source_location");
3708 tree decl = lookup_qualified_name (std_node, name);
3709 if (TREE_CODE (decl) != TYPE_DECL)
3711 auto_diagnostic_group d;
3712 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3713 qualified_name_lookup_error (std_node, name, decl, input_location);
3714 else
3715 error ("%qD is not a type", decl);
3716 return error_mark_node;
3718 name = get_identifier ("__impl");
3719 tree type = TREE_TYPE (decl);
3720 decl = lookup_qualified_name (type, name);
3721 if (TREE_CODE (decl) != TYPE_DECL)
3723 auto_diagnostic_group d;
3724 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3725 qualified_name_lookup_error (type, name, decl, input_location);
3726 else
3727 error ("%qD is not a type", decl);
3728 return error_mark_node;
3730 type = TREE_TYPE (decl);
3731 if (TREE_CODE (type) != RECORD_TYPE)
3733 error ("%qD is not a class type", decl);
3734 return error_mark_node;
3737 int cnt = 0;
3738 for (tree field = TYPE_FIELDS (type);
3739 (field = next_aggregate_field (field)) != NULL_TREE;
3740 field = DECL_CHAIN (field))
3742 if (DECL_NAME (field) != NULL_TREE)
3744 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3745 if (strcmp (n, "_M_file_name") == 0
3746 || strcmp (n, "_M_function_name") == 0)
3748 if (TREE_TYPE (field) != const_string_type_node)
3750 error ("%qD does not have %<const char *%> type", field);
3751 return error_mark_node;
3753 cnt++;
3754 continue;
3756 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3758 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3760 error ("%qD does not have integral type", field);
3761 return error_mark_node;
3763 cnt++;
3764 continue;
3767 cnt = 0;
3768 break;
3770 if (cnt != 4)
3772 error ("%<std::source_location::__impl%> does not contain only "
3773 "non-static data members %<_M_file_name%>, "
3774 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3775 return error_mark_node;
3777 return build_qualified_type (type, TYPE_QUAL_CONST);
3780 /* Type for source_location_table hash_set. */
3781 struct GTY((for_user)) source_location_table_entry {
3782 location_t loc;
3783 unsigned uid;
3784 tree var;
3787 /* Traits class for function start hash maps below. */
3789 struct source_location_table_entry_hash
3790 : ggc_remove <source_location_table_entry>
3792 typedef source_location_table_entry value_type;
3793 typedef source_location_table_entry compare_type;
3795 static hashval_t
3796 hash (const source_location_table_entry &ref)
3798 inchash::hash hstate (0);
3799 hstate.add_int (ref.loc);
3800 hstate.add_int (ref.uid);
3801 return hstate.end ();
3804 static bool
3805 equal (const source_location_table_entry &ref1,
3806 const source_location_table_entry &ref2)
3808 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3811 static void
3812 mark_deleted (source_location_table_entry &ref)
3814 ref.loc = UNKNOWN_LOCATION;
3815 ref.uid = -1U;
3816 ref.var = NULL_TREE;
3819 static const bool empty_zero_p = true;
3821 static void
3822 mark_empty (source_location_table_entry &ref)
3824 ref.loc = UNKNOWN_LOCATION;
3825 ref.uid = 0;
3826 ref.var = NULL_TREE;
3829 static bool
3830 is_deleted (const source_location_table_entry &ref)
3832 return (ref.loc == UNKNOWN_LOCATION
3833 && ref.uid == -1U
3834 && ref.var == NULL_TREE);
3837 static bool
3838 is_empty (const source_location_table_entry &ref)
3840 return (ref.loc == UNKNOWN_LOCATION
3841 && ref.uid == 0
3842 && ref.var == NULL_TREE);
3845 static void
3846 pch_nx (source_location_table_entry &p)
3848 extern void gt_pch_nx (source_location_table_entry &);
3849 gt_pch_nx (p);
3852 static void
3853 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3855 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3856 void *);
3857 gt_pch_nx (&p, op, cookie);
3861 static GTY(()) hash_table <source_location_table_entry_hash>
3862 *source_location_table;
3863 static GTY(()) unsigned int source_location_id;
3865 /* Fold the __builtin_source_location () call T. */
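/* Each distinct (location, function) pair is cached in
   source_location_table and shares a single static variable, so
   repeated folds of the same call site return the same address.  */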
3867 tree
3868 fold_builtin_source_location (const_tree t)
3870 gcc_assert (TREE_CODE (t) == CALL_EXPR);
3871 /* TREE_TYPE (t) is const std::source_location::__impl* */
3872 tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
3873 if (source_location_impl == error_mark_node)
3874 return build_zero_cst (const_ptr_type_node);
3875 gcc_assert (CLASS_TYPE_P (source_location_impl)
3876 && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));
3878 location_t loc = EXPR_LOCATION (t);
3879 if (source_location_table == NULL)
3880 source_location_table
3881 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3882 const line_map_ordinary *map;
3883 source_location_table_entry entry;
3884 entry.loc
3885 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3886 &map);
3887 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3888 entry.var = error_mark_node;
3889 source_location_table_entry *entryp
3890 = source_location_table->find_slot (entry, INSERT);
3891 tree var;
3892 if (entryp->var)
3893 var = entryp->var;
3894 else
3896 char tmp_name[32];
3897 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3898 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3899 source_location_impl);
3900 TREE_STATIC (var) = 1;
3901 TREE_PUBLIC (var) = 0;
3902 DECL_ARTIFICIAL (var) = 1;
3903 DECL_IGNORED_P (var) = 1;
3904 DECL_EXTERNAL (var) = 0;
3905 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3906 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3907 layout_decl (var, 0);
3909 vec<constructor_elt, va_gc> *v = NULL;
3910 vec_alloc (v, 4);
3911 for (tree field = TYPE_FIELDS (source_location_impl);
3912 (field = next_aggregate_field (field)) != NULL_TREE;
3913 field = DECL_CHAIN (field))
3915 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3916 tree val = NULL_TREE;
3917 if (strcmp (n, "_M_file_name") == 0)
3919 if (const char *fname = LOCATION_FILE (loc))
3921 fname = remap_macro_filename (fname);
3922 val = build_string_literal (fname);
3924 else
3925 val = build_string_literal ("");
3927 else if (strcmp (n, "_M_function_name") == 0)
3929 const char *name = "";
3931 if (current_function_decl)
3932 name = cxx_printable_name (current_function_decl, 2);
3934 val = build_string_literal (name);
3936 else if (strcmp (n, "_M_line") == 0)
3937 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3938 else if (strcmp (n, "_M_column") == 0)
3939 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3940 else
3941 gcc_unreachable ();
3942 CONSTRUCTOR_APPEND_ELT (v, field, val);
3945 tree ctor = build_constructor (source_location_impl, v);
3946 TREE_CONSTANT (ctor) = 1;
3947 TREE_STATIC (ctor) = 1;
3948 DECL_INITIAL (var) = ctor;
3949 varpool_node::finalize_decl (var);
3950 *entryp = entry;
3951 entryp->var = var;
3954 return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
3957 #include "gt-cp-cp-gimplify.h"