/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}
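
/* Informally, for  void f () throw (A)  the tree built above is roughly

     try { BODY }
     handler: EH_FILTER_EXPR <allowed = (A), failure = call unexpected>

   i.e. a TRY_CATCH_EXPR whose handler filters on the set of allowed
   types and runs FAILURE when an exception not listed there escapes.  */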

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as the then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
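
/* A sketch of the lowering above, on C++ source:

     if (c) f (); else g ();		=> COND_EXPR <c, f (), g ()> of void type
     if constexpr (C) f (); else g ();	=> just f () or just g ()
     if consteval { f (); } else g ();	=> g () only; the consteval arm
					   mattered only during constant
					   evaluation.  */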

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
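
/* E.g., roughly, for  T x = T (args)  represented as
   x = TARGET_EXPR <slot, AGGR_INIT_EXPR <T::T, slot, args>>, the slot is
   rewritten to x and the INIT_EXPR dropped, so the constructor builds x
   in place instead of building a temporary that would then be copied.  */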

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
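
/* The GIMPLE emitted above is roughly

     try { BODY } handler { eh_must_not_throw (terminate) }

   so an exception escaping BODY ends up calling std::terminate.  */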

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
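
/* E.g. given  struct E {};  E a, b;  the copy in  a = b;  writes nothing,
   since E has no data members, so callers can reduce such an assignment
   to evaluating its operands for side effects.  */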

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
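
/* E.g. evaluating  a[f ()]  as an lvalue runs f, so it has side-effects,
   whereas merely naming a volatile variable does not; only an actual
   read or write through the lvalue would.  */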

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
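
/* E.g. in  h (x, g ())  with left-to-right ordered arguments, if g ()
   has side effects, the value of x is forced into a temporary first so
   that g () cannot modify the already-evaluated argument (P0145).  */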

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
	 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
	 on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
			   || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
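
/* Return true if T is a parameter or return value that is passed by
   invisible reference: the source wrote a by-value object of class type,
   but under the hood the callee receives a reference to it.  */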

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
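
/* For example, with

     S s;			// S a class type
     #pragma omp task
     use (s);

   s is implicitly firstprivate in the task, so its copy constructor and
   destructor are instantiated here; by gimplification time it would be
   too late to do so.  */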

/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (flag_exceptions
	   && TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}

/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}

/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  For now, since most folding is done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL;
	}
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
	 here rather than in cp_genericize to avoid problems with the invisible
	 reference transition.  */
    case INIT_EXPR:
      if (data->genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, use it in place of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      TARGET_EXPR_ELIDING_P (init) = TARGET_EXPR_ELIDING_P (stmt);
	      *stmt_p = init;
	    }
	}
      break;

    default:
      break;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
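
/* E.g. for a cast like  (int (*)[n]) p  the anonymous VLA type int[n]
   gets an artificial TYPE_DECL and a DECL_EXPR here, so that
   gimplify_type_sizes will later evaluate its size (see c++/88256).  */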

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
	if (alias != error_mark_node)
	  {
	    *stmt_p = alias;
	    TREE_USED (alias) |= TREE_USED (stmt);
	  }
	*walk_subtrees = 0;
	return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (UNLIKELY (wtd->omp_ctx != NULL)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case ASSERTION_STMT:
    case PRECONDITION_STMT:
    case POSTCONDITION_STMT:
      {
	if (tree check = build_contract_check (stmt))
	  {
	    *stmt_p = check;
	    return cp_genericize_r (stmt_p, walk_subtrees, data);
	  }

	/* If we didn't build a check, replace it with void_node so we don't
	   leak contracts into GENERIC.  */
	*stmt_p = void_node;
	*walk_subtrees = 0;
      }
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC; the back end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_CONTEXT (using_directive) = current_function_decl;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				     cp_genericize_r, cp_walk_subtrees);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (warning_suppressed_p (stmt /* What warning?  */))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the only
		       case we need to solve is distribute parallel for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);

			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either:
	 UBSan covers this with ubsan_instrument_return above, where sufficient
	 information is provided, while the __builtin_unreachable () below
	 with return sanitization disabled would just result in a
	 hard-to-understand runtime error without location information.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
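
/* E.g. for

     int f (bool b) { if (b) return 1; }

   the body doesn't obviously end with a return, so with
   -fsanitize=return a runtime diagnostic is appended; otherwise the
   fall-off-the-end path becomes a trap or __builtin_unreachable.  */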

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
1977 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1978 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1979 actually only takes one argument. */
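/* For array arguments the code built below has roughly this shape
   (a sketch of the emitted statements, not literal source):

     p1 = &arg1[0]...[0];
     end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];          // only if ARG2 is given
   lab:
     fn (p1, p2, default args...);
     p1 += sizeof (element);
     p2 += sizeof (element);
     if (p1 != end1) goto lab;  */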
1981 static tree
1982 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1984 tree defparm, parm, t;
1985 int i = 0;
1986 int nargs;
1987 tree *argarray;
1989 if (fn == NULL)
1990 return NULL;
1992 nargs = list_length (DECL_ARGUMENTS (fn));
1993 argarray = XALLOCAVEC (tree, nargs);
1995 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1996 if (arg2)
1997 defparm = TREE_CHAIN (defparm);
1999 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
2000 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
2002 tree inner_type = TREE_TYPE (arg1);
2003 tree start1, end1, p1;
2004 tree start2 = NULL, p2 = NULL;
2005 tree ret = NULL, lab;
2007 start1 = arg1;
2008 start2 = arg2;
2011 inner_type = TREE_TYPE (inner_type);
2012 start1 = build4 (ARRAY_REF, inner_type, start1,
2013 size_zero_node, NULL, NULL);
2014 if (arg2)
2015 start2 = build4 (ARRAY_REF, inner_type, start2,
2016 size_zero_node, NULL, NULL);
2018 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2019 start1 = build_fold_addr_expr_loc (input_location, start1);
2020 if (arg2)
2021 start2 = build_fold_addr_expr_loc (input_location, start2);
2023 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2024 end1 = fold_build_pointer_plus (start1, end1);
2026 p1 = create_tmp_var (TREE_TYPE (start1));
2027 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2028 append_to_statement_list (t, &ret);
2030 if (arg2)
2032 p2 = create_tmp_var (TREE_TYPE (start2));
2033 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2034 append_to_statement_list (t, &ret);
2037 lab = create_artificial_label (input_location);
2038 t = build1 (LABEL_EXPR, void_type_node, lab);
2039 append_to_statement_list (t, &ret);
2041 argarray[i++] = p1;
2042 if (arg2)
2043 argarray[i++] = p2;
2044 /* Handle default arguments. */
2045 for (parm = defparm; parm && parm != void_list_node;
2046 parm = TREE_CHAIN (parm), i++)
2047 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2048 TREE_PURPOSE (parm), fn,
2049 i - is_method, tf_warning_or_error);
2050 t = build_call_a (fn, i, argarray);
2051 t = fold_convert (void_type_node, t);
2052 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2053 append_to_statement_list (t, &ret);
2055 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2056 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2057 append_to_statement_list (t, &ret);
2059 if (arg2)
2061 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2062 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2063 append_to_statement_list (t, &ret);
2066 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2067 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2068 append_to_statement_list (t, &ret);
2070 return ret;
2072 else
2074 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2075 if (arg2)
2076 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2077 /* Handle default arguments. */
2078 for (parm = defparm; parm && parm != void_list_node;
2079 parm = TREE_CHAIN (parm), i++)
2080 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2081 TREE_PURPOSE (parm), fn,
2082 i - is_method, tf_warning_or_error);
2083 t = build_call_a (fn, i, argarray);
2084 t = fold_convert (void_type_node, t);
2085 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2089 /* Return code to initialize DECL with its default constructor, or
2090 NULL if there's nothing to do. */
2092 tree
2093 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2095 tree info = CP_OMP_CLAUSE_INFO (clause);
2096 tree ret = NULL;
2098 if (info)
2099 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2101 return ret;
2104 /* Return code to initialize DST with a copy constructor from SRC. */
2106 tree
2107 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2109 tree info = CP_OMP_CLAUSE_INFO (clause);
2110 tree ret = NULL;
2112 if (info)
2113 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2114 if (ret == NULL)
2115 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2117 return ret;
2120 /* Similarly, except use an assignment operator instead. */
2122 tree
2123 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2125 tree info = CP_OMP_CLAUSE_INFO (clause);
2126 tree ret = NULL;
2128 if (info)
2129 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2130 if (ret == NULL)
2131 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2133 return ret;
2136 /* Return code to destroy DECL. */
2138 tree
2139 cxx_omp_clause_dtor (tree clause, tree decl)
2141 tree info = CP_OMP_CLAUSE_INFO (clause);
2142 tree ret = NULL;
2144 if (info)
2145 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2147 return ret;
2150 /* True if OpenMP should privatize what this DECL points to rather
2151 than the DECL itself. */
2153 bool
2154 cxx_omp_privatize_by_reference (const_tree decl)
2156 return (TYPE_REF_P (TREE_TYPE (decl))
2157 || is_invisiref_parm (decl));
2160 /* Return true if DECL is a const-qualified var having no mutable member. */
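/* E.g. (illustrative): a DECL of type 'const A' with
   'struct A { int i; };' qualifies, whereas
   'struct B { mutable int i; };' does not, since B's contents can
   still change through the mutable member.  */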
2161 bool
2162 cxx_omp_const_qual_no_mutable (tree decl)
2164 tree type = TREE_TYPE (decl);
2165 if (TYPE_REF_P (type))
2167 if (!is_invisiref_parm (decl))
2168 return false;
2169 type = TREE_TYPE (type);
2171 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2173 /* NVR doesn't preserve const qualification of the
2174 variable's type. */
2175 tree outer = outer_curly_brace_block (current_function_decl);
2176 tree var;
2178 if (outer)
2179 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2180 if (VAR_P (var)
2181 && DECL_NAME (decl) == DECL_NAME (var)
2182 && (TYPE_MAIN_VARIANT (type)
2183 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2185 if (TYPE_READONLY (TREE_TYPE (var)))
2186 type = TREE_TYPE (var);
2187 break;
2192 if (type == error_mark_node)
2193 return false;
2195 /* Variables with const-qualified type having no mutable member
2196 are predetermined shared. */
2197 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2198 return true;
2200 return false;
2203 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2204 of DECL is predetermined. */
2206 enum omp_clause_default_kind
2207 cxx_omp_predetermined_sharing_1 (tree decl)
2209 /* Static data members are predetermined shared. */
2210 if (TREE_STATIC (decl))
2212 tree ctx = CP_DECL_CONTEXT (decl);
2213 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2214 return OMP_CLAUSE_DEFAULT_SHARED;
2216 if (c_omp_predefined_variable (decl))
2217 return OMP_CLAUSE_DEFAULT_SHARED;
2220 /* The 'this' pointer may not be specified in data-sharing clauses,
2221 yet we still need to predetermine it firstprivate. */
2222 if (decl == current_class_ptr)
2223 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2225 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2228 /* Likewise, but also include the artificial vars. We don't want to
2229 disallow the artificial vars being mentioned in explicit clauses,
2230 as we use artificial vars e.g. for loop constructs with random
2231 access iterators other than pointers, but during gimplification
2232 we want to treat them as predetermined. */
2234 enum omp_clause_default_kind
2235 cxx_omp_predetermined_sharing (tree decl)
2237 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2238 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2239 return ret;
2241 /* Predetermine artificial variables holding integral values; those
2242 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2243 gimplification. */
2244 if (VAR_P (decl)
2245 && DECL_ARTIFICIAL (decl)
2246 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2247 && !(DECL_LANG_SPECIFIC (decl)
2248 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2249 return OMP_CLAUSE_DEFAULT_SHARED;
2251 /* Similarly for typeinfo symbols. */
2252 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2253 return OMP_CLAUSE_DEFAULT_SHARED;
2255 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2258 enum omp_clause_defaultmap_kind
2259 cxx_omp_predetermined_mapping (tree decl)
2261 /* Predetermine artificial variables holding integral values; those
2262 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2263 gimplification. */
2264 if (VAR_P (decl)
2265 && DECL_ARTIFICIAL (decl)
2266 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2267 && !(DECL_LANG_SPECIFIC (decl)
2268 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2269 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2271 if (c_omp_predefined_variable (decl))
2272 return OMP_CLAUSE_DEFAULTMAP_TO;
2274 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2277 /* Finalize an implicitly determined clause. */
2279 void
2280 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2282 tree decl, inner_type;
2283 bool make_shared = false;
2285 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2286 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2287 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2288 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2289 return;
2291 decl = OMP_CLAUSE_DECL (c);
2292 decl = require_complete_type (decl);
2293 inner_type = TREE_TYPE (decl);
2294 if (decl == error_mark_node)
2295 make_shared = true;
2296 else if (TYPE_REF_P (TREE_TYPE (decl)))
2297 inner_type = TREE_TYPE (inner_type);
2299 /* We're interested in the base element, not arrays. */
2300 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2301 inner_type = TREE_TYPE (inner_type);
2303 /* Check for special function availability by building a call to one.
2304 Save the results, because later we won't be in the right context
2305 for making these queries. */
2306 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2307 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2308 if (!make_shared
2309 && CLASS_TYPE_P (inner_type)
2310 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2311 true))
2312 make_shared = true;
2314 if (make_shared)
2316 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2317 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2318 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2322 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2323 disregarded in OpenMP construct, because it is going to be
2324 remapped during OpenMP lowering. SHARED is true if DECL
2325 is going to be shared, false if it is going to be privatized. */
2327 bool
2328 cxx_omp_disregard_value_expr (tree decl, bool shared)
2330 if (shared)
2331 return false;
2332 if (VAR_P (decl)
2333 && DECL_HAS_VALUE_EXPR_P (decl)
2334 && DECL_ARTIFICIAL (decl)
2335 && DECL_LANG_SPECIFIC (decl)
2336 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2337 return true;
2338 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2339 return true;
2340 return false;
2343 /* Fold expression X which is used as an rvalue if RVAL is true. */
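/* E.g. (illustrative): given 'const int n = 42;', folding 'n' as an
   rvalue replaces it with the INTEGER_CST 42 via decl_constant_value;
   the loop below then retries in case the replacement itself can be
   folded further.  */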
2345 tree
2346 cp_fold_maybe_rvalue (tree x, bool rval)
2348 while (true)
2350 x = cp_fold (x);
2351 if (rval)
2352 x = mark_rvalue_use (x);
2353 if (rval && DECL_P (x)
2354 && !TYPE_REF_P (TREE_TYPE (x)))
2356 tree v = decl_constant_value (x);
2357 if (v != x && v != error_mark_node)
2359 x = v;
2360 continue;
2363 break;
2365 return x;
2368 /* Fold expression X which is used as an rvalue. */
2370 tree
2371 cp_fold_rvalue (tree x)
2373 return cp_fold_maybe_rvalue (x, true);
2376 /* Perform folding on expression X. */
2378 tree
2379 cp_fully_fold (tree x)
2381 if (processing_template_decl)
2382 return x;
2383 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2384 have to call both. */
2385 if (cxx_dialect >= cxx11)
2387 x = maybe_constant_value (x);
2388 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2389 a TARGET_EXPR; undo that here. */
2390 if (TREE_CODE (x) == TARGET_EXPR)
2391 x = TARGET_EXPR_INITIAL (x);
2392 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2393 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2394 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2395 x = TREE_OPERAND (x, 0);
2397 return cp_fold_rvalue (x);
2400 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2401 in some cases. */
2403 tree
2404 cp_fully_fold_init (tree x)
2406 if (processing_template_decl)
2407 return x;
2408 x = cp_fully_fold (x);
2409 cp_fold_data data (/*genericize*/false);
2410 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2411 return x;
2414 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2415 and certain changes are made to the folding done. Or should be (FIXME). We
2416 never touch maybe_const, as it is only used for the C front-end
2417 C_MAYBE_CONST_EXPR. */
2419 tree
2420 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2422 return cp_fold_maybe_rvalue (x, !lval);
2425 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2427 /* Dispose of the whole FOLD_CACHE. */
2429 void
2430 clear_fold_cache (void)
2432 if (fold_cache != NULL)
2433 fold_cache->empty ();
2436 /* This function tries to fold an expression X.
2437 To avoid combinatorial explosion, folding results are kept in fold_cache.
2438 If X is invalid, we don't fold at all.
2439 For performance reasons we don't cache expressions representing a
2440 declaration or constant.
2441 Returns X or its folded variant. */
2443 static tree
2444 cp_fold (tree x)
2446 tree op0, op1, op2, op3;
2447 tree org_x = x, r = NULL_TREE;
2448 enum tree_code code;
2449 location_t loc;
2450 bool rval_ops = true;
2452 if (!x || x == error_mark_node)
2453 return x;
2455 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2456 return x;
2458 /* Don't bother to cache DECLs or constants. */
2459 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2460 return x;
2462 if (fold_cache == NULL)
2463 fold_cache = hash_map<tree, tree>::create_ggc (101);
2465 if (tree *cached = fold_cache->get (x))
2466 return *cached;
2468 uid_sensitive_constexpr_evaluation_checker c;
2470 code = TREE_CODE (x);
2471 switch (code)
2473 case CLEANUP_POINT_EXPR:
2474 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2475 effects. */
2476 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2477 if (!TREE_SIDE_EFFECTS (r))
2478 x = r;
2479 break;
2481 case SIZEOF_EXPR:
2482 x = fold_sizeof_expr (x);
2483 break;
2485 case VIEW_CONVERT_EXPR:
2486 rval_ops = false;
2487 /* FALLTHRU */
2488 case NON_LVALUE_EXPR:
2489 CASE_CONVERT:
2491 if (VOID_TYPE_P (TREE_TYPE (x)))
2493 /* This is just to make sure we don't end up with casts to
2494 void from error_mark_node. If we just return x, then
2495 cp_fold_r might fold the operand into error_mark_node and
2496 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2497 during gimplification doesn't like such casts.
2498 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2499 folding of the operand should be in the caches and if in cp_fold_r
2500 it will modify it in place. */
2501 op0 = cp_fold (TREE_OPERAND (x, 0));
2502 if (op0 == error_mark_node)
2503 x = error_mark_node;
2504 break;
2507 loc = EXPR_LOCATION (x);
2508 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2510 if (code == CONVERT_EXPR
2511 && SCALAR_TYPE_P (TREE_TYPE (x))
2512 && op0 != void_node)
2513 /* During parsing we used convert_to_*_nofold; re-convert now using the
2514 folding variants, since fold() doesn't do those transformations. */
2515 x = fold (convert (TREE_TYPE (x), op0));
2516 else if (op0 != TREE_OPERAND (x, 0))
2518 if (op0 == error_mark_node)
2519 x = error_mark_node;
2520 else
2521 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2523 else
2524 x = fold (x);
2526 /* Conversion of an out-of-range value has implementation-defined
2527 behavior; the language considers it different from arithmetic
2528 overflow, which is undefined. */
2529 if (TREE_CODE (op0) == INTEGER_CST
2530 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2531 TREE_OVERFLOW (x) = false;
2533 break;
2535 case EXCESS_PRECISION_EXPR:
2536 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2537 x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
2538 break;
2540 case INDIRECT_REF:
2541 /* We don't need the decltype(auto) obfuscation anymore. */
2542 if (REF_PARENTHESIZED_P (x))
2544 tree p = maybe_undo_parenthesized_ref (x);
2545 if (p != x)
2546 return cp_fold (p);
2548 goto unary;
2550 case ADDR_EXPR:
2551 loc = EXPR_LOCATION (x);
2552 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2554 /* Cope with user tricks that amount to offsetof. */
2555 if (op0 != error_mark_node
2556 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2558 tree val = get_base_address (op0);
2559 if (val
2560 && INDIRECT_REF_P (val)
2561 && COMPLETE_TYPE_P (TREE_TYPE (val))
2562 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2564 val = TREE_OPERAND (val, 0);
2565 STRIP_NOPS (val);
2566 val = maybe_constant_value (val);
2567 if (TREE_CODE (val) == INTEGER_CST)
2568 return fold_offsetof (op0, TREE_TYPE (x));
2571 goto finish_unary;
2573 case REALPART_EXPR:
2574 case IMAGPART_EXPR:
2575 rval_ops = false;
2576 /* FALLTHRU */
2577 case CONJ_EXPR:
2578 case FIX_TRUNC_EXPR:
2579 case FLOAT_EXPR:
2580 case NEGATE_EXPR:
2581 case ABS_EXPR:
2582 case ABSU_EXPR:
2583 case BIT_NOT_EXPR:
2584 case TRUTH_NOT_EXPR:
2585 case FIXED_CONVERT_EXPR:
2586 unary:
2588 loc = EXPR_LOCATION (x);
2589 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2591 finish_unary:
2592 if (op0 != TREE_OPERAND (x, 0))
2594 if (op0 == error_mark_node)
2595 x = error_mark_node;
2596 else
2598 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2599 if (code == INDIRECT_REF
2600 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2602 TREE_READONLY (x) = TREE_READONLY (org_x);
2603 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2604 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2608 else
2609 x = fold (x);
2611 gcc_assert (TREE_CODE (x) != COND_EXPR
2612 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2613 break;
2615 case UNARY_PLUS_EXPR:
2616 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2617 if (op0 == error_mark_node)
2618 x = error_mark_node;
2619 else
2620 x = fold_convert (TREE_TYPE (x), op0);
2621 break;
2623 case POSTDECREMENT_EXPR:
2624 case POSTINCREMENT_EXPR:
2625 case INIT_EXPR:
2626 case PREDECREMENT_EXPR:
2627 case PREINCREMENT_EXPR:
2628 case COMPOUND_EXPR:
2629 case MODIFY_EXPR:
2630 rval_ops = false;
2631 /* FALLTHRU */
2632 case POINTER_PLUS_EXPR:
2633 case PLUS_EXPR:
2634 case POINTER_DIFF_EXPR:
2635 case MINUS_EXPR:
2636 case MULT_EXPR:
2637 case TRUNC_DIV_EXPR:
2638 case CEIL_DIV_EXPR:
2639 case FLOOR_DIV_EXPR:
2640 case ROUND_DIV_EXPR:
2641 case TRUNC_MOD_EXPR:
2642 case CEIL_MOD_EXPR:
2643 case ROUND_MOD_EXPR:
2644 case RDIV_EXPR:
2645 case EXACT_DIV_EXPR:
2646 case MIN_EXPR:
2647 case MAX_EXPR:
2648 case LSHIFT_EXPR:
2649 case RSHIFT_EXPR:
2650 case LROTATE_EXPR:
2651 case RROTATE_EXPR:
2652 case BIT_AND_EXPR:
2653 case BIT_IOR_EXPR:
2654 case BIT_XOR_EXPR:
2655 case TRUTH_AND_EXPR:
2656 case TRUTH_ANDIF_EXPR:
2657 case TRUTH_OR_EXPR:
2658 case TRUTH_ORIF_EXPR:
2659 case TRUTH_XOR_EXPR:
2660 case LT_EXPR: case LE_EXPR:
2661 case GT_EXPR: case GE_EXPR:
2662 case EQ_EXPR: case NE_EXPR:
2663 case UNORDERED_EXPR: case ORDERED_EXPR:
2664 case UNLT_EXPR: case UNLE_EXPR:
2665 case UNGT_EXPR: case UNGE_EXPR:
2666 case UNEQ_EXPR: case LTGT_EXPR:
2667 case RANGE_EXPR: case COMPLEX_EXPR:
2669 loc = EXPR_LOCATION (x);
2670 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2671 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2673 /* decltype(nullptr) has only one value, so optimize away all comparisons
2674 with that type right away; keeping them in the IL causes trouble for
2675 various optimizations. */
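/* E.g. (illustrative): given 'decltype(nullptr) a, b;', the
   comparison 'a == b' folds to true here, while
   omit_two_operands_loc below retains any side effects of the
   operands.  */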
2676 if (COMPARISON_CLASS_P (org_x)
2677 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2678 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2680 switch (code)
2682 case EQ_EXPR:
2683 x = constant_boolean_node (true, TREE_TYPE (x));
2684 break;
2685 case NE_EXPR:
2686 x = constant_boolean_node (false, TREE_TYPE (x));
2687 break;
2688 default:
2689 gcc_unreachable ();
2691 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2692 op0, op1);
2695 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2697 if (op0 == error_mark_node || op1 == error_mark_node)
2698 x = error_mark_node;
2699 else
2700 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2702 else
2703 x = fold (x);
2705 /* This is only needed for -Wnonnull-compare and only if
2706 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2707 generation, we always do it. */
2708 if (COMPARISON_CLASS_P (org_x))
2710 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2712 else if (COMPARISON_CLASS_P (x))
2714 if (warn_nonnull_compare
2715 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2716 suppress_warning (x, OPT_Wnonnull_compare);
2718 /* Otherwise give up on optimizing these; let GIMPLE folders
2719 optimize them later on. */
2720 else if (op0 != TREE_OPERAND (org_x, 0)
2721 || op1 != TREE_OPERAND (org_x, 1))
2723 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2724 if (warn_nonnull_compare
2725 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2726 suppress_warning (x, OPT_Wnonnull_compare);
2728 else
2729 x = org_x;
2732 break;
2734 case VEC_COND_EXPR:
2735 case COND_EXPR:
2736 loc = EXPR_LOCATION (x);
2737 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2738 op1 = cp_fold (TREE_OPERAND (x, 1));
2739 op2 = cp_fold (TREE_OPERAND (x, 2));
2741 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2743 warning_sentinel s (warn_int_in_bool_context);
2744 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2745 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2746 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2747 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2749 else if (VOID_TYPE_P (TREE_TYPE (x)))
2751 if (TREE_CODE (op0) == INTEGER_CST)
2753 /* If the condition is constant, fold can fold away
2754 the COND_EXPR. Some statement-level uses of COND_EXPR
2755 have one of the branches NULL; avoid crashing on those. */
2756 if (!op1)
2757 op1 = build_empty_stmt (loc);
2758 if (!op2)
2759 op2 = build_empty_stmt (loc);
2761 else
2763 /* Otherwise, don't bother folding a void condition, since
2764 it can't produce a constant value. */
2765 if (op0 != TREE_OPERAND (x, 0)
2766 || op1 != TREE_OPERAND (x, 1)
2767 || op2 != TREE_OPERAND (x, 2))
2768 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2769 break;
2773 if (op0 != TREE_OPERAND (x, 0)
2774 || op1 != TREE_OPERAND (x, 1)
2775 || op2 != TREE_OPERAND (x, 2))
2777 if (op0 == error_mark_node
2778 || op1 == error_mark_node
2779 || op2 == error_mark_node)
2780 x = error_mark_node;
2781 else
2782 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2784 else
2785 x = fold (x);
2787 /* A COND_EXPR might have incompatible types in branches if one or both
2788 arms are bitfields. If folding exposed such a branch, fix it up. */
2789 if (TREE_CODE (x) != code
2790 && x != error_mark_node
2791 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2792 x = fold_convert (TREE_TYPE (org_x), x);
2794 break;
2796 case CALL_EXPR:
2798 tree callee = get_callee_fndecl (x);
2800 /* "Inline" calls to std::move/forward and other cast-like functions
2801 by simply folding them into a corresponding cast to their return
2802 type. This is cheaper than relying on the middle end to do so, and
2803 also means we avoid generating useless debug info for them at all.
2805 At this point the argument has already been converted into a
2806 reference, so it suffices to use a NOP_EXPR to express the
2807 cast. */
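/* E.g. (illustrative): a call 'std::move (x)' with 'x' of type T
   becomes the equivalent of 'static_cast<T&&> (x)', i.e. a NOP_EXPR
   to the return type, leaving no CALL_EXPR behind in the IL.  */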
2808 if ((OPTION_SET_P (flag_fold_simple_inlines)
2809 ? flag_fold_simple_inlines
2810 : !flag_no_inline)
2811 && call_expr_nargs (x) == 1
2812 && decl_in_std_namespace_p (callee)
2813 && DECL_NAME (callee) != NULL_TREE
2814 && (id_equal (DECL_NAME (callee), "move")
2815 || id_equal (DECL_NAME (callee), "forward")
2816 || id_equal (DECL_NAME (callee), "addressof")
2817 /* This addressof equivalent is used heavily in libstdc++. */
2818 || id_equal (DECL_NAME (callee), "__addressof")
2819 || id_equal (DECL_NAME (callee), "as_const")))
2821 r = CALL_EXPR_ARG (x, 0);
2822 /* Check that the return and argument types are sane before
2823 folding. */
2824 if (INDIRECT_TYPE_P (TREE_TYPE (x))
2825 && INDIRECT_TYPE_P (TREE_TYPE (r)))
2827 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2828 r = build_nop (TREE_TYPE (x), r);
2829 x = cp_fold (r);
2830 break;
2834 int sv = optimize, nw = sv;
2836 /* Some built-in function calls will be evaluated at compile-time in
2837 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2838 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2839 if (callee && fndecl_built_in_p (callee) && !optimize
2840 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2841 && current_function_decl
2842 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2843 nw = 1;
2845 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2847 switch (DECL_FE_FUNCTION_CODE (callee))
2849 /* Defer folding __builtin_is_constant_evaluated. */
2850 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2851 break;
2852 case CP_BUILT_IN_SOURCE_LOCATION:
2853 x = fold_builtin_source_location (EXPR_LOCATION (x));
2854 break;
2855 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2856 x = fold_builtin_is_corresponding_member
2857 (EXPR_LOCATION (x), call_expr_nargs (x),
2858 &CALL_EXPR_ARG (x, 0));
2859 break;
2860 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2861 x = fold_builtin_is_pointer_inverconvertible_with_class
2862 (EXPR_LOCATION (x), call_expr_nargs (x),
2863 &CALL_EXPR_ARG (x, 0));
2864 break;
2865 default:
2866 break;
2868 break;
2871 if (callee
2872 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2873 BUILT_IN_FRONTEND))
2875 x = fold_builtin_source_location (EXPR_LOCATION (x));
2876 break;
2879 bool changed = false;
2880 int m = call_expr_nargs (x);
2881 for (int i = 0; i < m; i++)
2883 r = cp_fold (CALL_EXPR_ARG (x, i));
2884 if (r != CALL_EXPR_ARG (x, i))
2886 if (r == error_mark_node)
2888 x = error_mark_node;
2889 break;
2891 if (!changed)
2892 x = copy_node (x);
2893 CALL_EXPR_ARG (x, i) = r;
2894 changed = true;
2897 if (x == error_mark_node)
2898 break;
2900 optimize = nw;
2901 r = fold (x);
2902 optimize = sv;
2904 if (TREE_CODE (r) != CALL_EXPR)
2906 x = cp_fold (r);
2907 break;
2910 optimize = nw;
2912 /* Invoke maybe_constant_value for functions declared
2913 constexpr and not called with AGGR_INIT_EXPRs.
2914 TODO:
2915 Do constexpr expansion of expressions where the call itself is not
2916 constant, but the call followed by an INDIRECT_REF is. */
2917 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2918 && !flag_no_inline)
2919 r = maybe_constant_value (x);
2920 optimize = sv;
2922 if (TREE_CODE (r) != CALL_EXPR)
2924 if (DECL_CONSTRUCTOR_P (callee))
2926 loc = EXPR_LOCATION (x);
2927 tree s = build_fold_indirect_ref_loc (loc,
2928 CALL_EXPR_ARG (x, 0));
2929 r = cp_build_init_expr (s, r);
2931 x = r;
2932 break;
2935 break;
2938 case CONSTRUCTOR:
2940 unsigned i;
2941 constructor_elt *p;
2942 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2943 vec<constructor_elt, va_gc> *nelts = NULL;
2944 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2946 tree op = cp_fold (p->value);
2947 if (op != p->value)
2949 if (op == error_mark_node)
2951 x = error_mark_node;
2952 vec_free (nelts);
2953 break;
2955 if (nelts == NULL)
2956 nelts = elts->copy ();
2957 (*nelts)[i].value = op;
2960 if (nelts)
2962 x = build_constructor (TREE_TYPE (x), nelts);
2963 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2964 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2966 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2967 x = fold (x);
2968 break;
2970 case TREE_VEC:
2972 bool changed = false;
2973 int n = TREE_VEC_LENGTH (x);
2975 for (int i = 0; i < n; i++)
2977 tree op = cp_fold (TREE_VEC_ELT (x, i));
2978 if (op != TREE_VEC_ELT (x, i))
2980 if (!changed)
2981 x = copy_node (x);
2982 TREE_VEC_ELT (x, i) = op;
2983 changed = true;
2988 break;
2990 case ARRAY_REF:
2991 case ARRAY_RANGE_REF:
2993 loc = EXPR_LOCATION (x);
2994 op0 = cp_fold (TREE_OPERAND (x, 0));
2995 op1 = cp_fold (TREE_OPERAND (x, 1));
2996 op2 = cp_fold (TREE_OPERAND (x, 2));
2997 op3 = cp_fold (TREE_OPERAND (x, 3));
2999 if (op0 != TREE_OPERAND (x, 0)
3000 || op1 != TREE_OPERAND (x, 1)
3001 || op2 != TREE_OPERAND (x, 2)
3002 || op3 != TREE_OPERAND (x, 3))
3004 if (op0 == error_mark_node
3005 || op1 == error_mark_node
3006 || op2 == error_mark_node
3007 || op3 == error_mark_node)
3008 x = error_mark_node;
3009 else
3011 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3012 TREE_READONLY (x) = TREE_READONLY (org_x);
3013 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3014 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3018 x = fold (x);
3019 break;
3021 case SAVE_EXPR:
3022 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3023 folding, evaluates to an invariant. In that case no need to wrap
3024 this folded tree with a SAVE_EXPR. */
3025 r = cp_fold (TREE_OPERAND (x, 0));
3026 if (tree_invariant_p (r))
3027 x = r;
3028 break;
3030 case REQUIRES_EXPR:
3031 x = evaluate_requires_expr (x);
3032 break;
3034 default:
3035 return org_x;
3038 if (EXPR_P (x) && TREE_CODE (x) == code)
3040 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3041 copy_warning (x, org_x);
3044 if (!c.evaluation_restricted_p ())
3046 fold_cache->put (org_x, x);
3047 /* Don't try to fold an already folded result again. */
3048 if (x != org_x)
3049 fold_cache->put (x, x);
3052 return x;
3055 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3057 tree
3058 lookup_hotness_attribute (tree list)
3060 for (; list; list = TREE_CHAIN (list))
3062 tree name = get_attribute_name (list);
3063 if ((is_attribute_p ("hot", name)
3064 || is_attribute_p ("cold", name)
3065 || is_attribute_p ("likely", name)
3066 || is_attribute_p ("unlikely", name))
3067 && is_attribute_namespace_p ("", list))
3068 break;
3070 return list;
3073 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3075 static tree
3076 remove_hotness_attribute (tree list)
3078 for (tree *p = &list; *p; )
3080 tree l = *p;
3081 tree name = get_attribute_name (l);
3082 if ((is_attribute_p ("hot", name)
3083 || is_attribute_p ("cold", name)
3084 || is_attribute_p ("likely", name)
3085 || is_attribute_p ("unlikely", name))
3086 && is_attribute_namespace_p ("", l))
3088 *p = TREE_CHAIN (l);
3089 continue;
3091 p = &TREE_CHAIN (l);
3093 return list;
3096 /* If [[likely]] or [[unlikely]] appears on this statement, turn it into a
3097 PREDICT_EXPR. */
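/* E.g. (illustrative):

     if (x)
       [[likely]] f ();

   emits a PREDICT_EXPR (PRED_HOT_LABEL, TAKEN) before the call
   statement, biasing branch prediction without changing semantics.  */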
3099 tree
3100 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3102 if (std_attrs == error_mark_node)
3103 return std_attrs;
3104 if (tree attr = lookup_hotness_attribute (std_attrs))
3106 tree name = get_attribute_name (attr);
3107 bool hot = (is_attribute_p ("hot", name)
3108 || is_attribute_p ("likely", name));
3109 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3110 hot ? TAKEN : NOT_TAKEN);
3111 SET_EXPR_LOCATION (pred, attrs_loc);
3112 add_stmt (pred);
3113 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3114 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3115 get_attribute_name (other), name);
3116 std_attrs = remove_hotness_attribute (std_attrs);
3118 return std_attrs;
3121 /* Build IFN_ASSUME internal call for assume condition ARG. */
3123 tree
3124 build_assume_call (location_t loc, tree arg)
3126 if (!processing_template_decl)
3127 arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
3128 return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
3129 1, arg);
3132 /* If [[assume (cond)]] appears on this statement, handle it. */
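/* E.g. (illustrative): '[[assume (n > 0)]];' is lowered via
   build_assume_call to an internal call '.ASSUME (n > 0)'
   (IFN_ASSUME) whose condition the optimizers may exploit but which
   is not supposed to be evaluated at run time.  */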
3134 tree
3135 process_stmt_assume_attribute (tree std_attrs, tree statement,
3136 location_t attrs_loc)
3138 if (std_attrs == error_mark_node)
3139 return std_attrs;
3140 tree attr = lookup_attribute ("gnu", "assume", std_attrs);
3141 if (!attr)
3142 return std_attrs;
3143 /* The next token after the assume attribute is not ';'. */
3144 if (statement)
3146 warning_at (attrs_loc, OPT_Wattributes,
3147 "%<assume%> attribute not followed by %<;%>");
3148 attr = NULL_TREE;
3150 for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
3152 tree args = TREE_VALUE (attr);
3153 int nargs = list_length (args);
3154 if (nargs != 1)
3156 auto_diagnostic_group d;
3157 error_at (attrs_loc, "wrong number of arguments specified for "
3158 "%qE attribute", get_attribute_name (attr));
3159 inform (attrs_loc, "expected %i, found %i", 1, nargs);
3161 else
3163 tree arg = TREE_VALUE (args);
3164 if (!type_dependent_expression_p (arg))
3165 arg = contextual_conv_bool (arg, tf_warning_or_error);
3166 if (error_operand_p (arg))
3167 continue;
3168 finish_expr_stmt (build_assume_call (attrs_loc, arg));
3171 return remove_attribute ("gnu", "assume", std_attrs);
3174 /* Helper of fold_builtin_source_location: return the
3175 std::source_location::__impl type after performing verification
3176 on it. LOC is used for reporting any errors. */
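/* The layout being verified corresponds roughly to (an assumed
   libstdc++-style definition, shown for illustration only):

     namespace std {
       struct source_location {
         struct __impl {
           const char *_M_file_name;
           const char *_M_function_name;
           unsigned _M_line;
           unsigned _M_column;
         };
       };
     }

   where the two integral members may have any integral type.  */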
3178 static tree
3179 get_source_location_impl_type (location_t loc)
3181 tree name = get_identifier ("source_location");
3182 tree decl = lookup_qualified_name (std_node, name);
3183 if (TREE_CODE (decl) != TYPE_DECL)
3185 auto_diagnostic_group d;
3186 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3187 qualified_name_lookup_error (std_node, name, decl, loc);
3188 else
3189 error_at (loc, "%qD is not a type", decl);
3190 return error_mark_node;
3192 name = get_identifier ("__impl");
3193 tree type = TREE_TYPE (decl);
3194 decl = lookup_qualified_name (type, name);
3195 if (TREE_CODE (decl) != TYPE_DECL)
3197 auto_diagnostic_group d;
3198 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3199 qualified_name_lookup_error (type, name, decl, loc);
3200 else
3201 error_at (loc, "%qD is not a type", decl);
3202 return error_mark_node;
3204 type = TREE_TYPE (decl);
3205 if (TREE_CODE (type) != RECORD_TYPE)
3207 error_at (loc, "%qD is not a class type", decl);
3208 return error_mark_node;
3211 int cnt = 0;
3212 for (tree field = TYPE_FIELDS (type);
3213 (field = next_aggregate_field (field)) != NULL_TREE;
3214 field = DECL_CHAIN (field))
3216 if (DECL_NAME (field) != NULL_TREE)
3218 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3219 if (strcmp (n, "_M_file_name") == 0
3220 || strcmp (n, "_M_function_name") == 0)
3222 if (TREE_TYPE (field) != const_string_type_node)
3224 error_at (loc, "%qD does not have %<const char *%> type",
3225 field);
3226 return error_mark_node;
3228 cnt++;
3229 continue;
3231 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3233 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3235 error_at (loc, "%qD does not have integral type", field);
3236 return error_mark_node;
3238 cnt++;
3239 continue;
3242 cnt = 0;
3243 break;
3245 if (cnt != 4)
3247 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3248 "non-static data members %<_M_file_name%>, "
3249 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3250 return error_mark_node;
3252 return build_qualified_type (type, TYPE_QUAL_CONST);
3255 /* Entry type for the source_location_table hash table. */
3256 struct GTY((for_user)) source_location_table_entry {
3257 location_t loc;
3258 unsigned uid;
3259 tree var;
3262 /* Traits class for the source_location_table hash table below. */
3264 struct source_location_table_entry_hash
3265 : ggc_remove <source_location_table_entry>
3267 typedef source_location_table_entry value_type;
3268 typedef source_location_table_entry compare_type;
3270 static hashval_t
3271 hash (const source_location_table_entry &ref)
3273 inchash::hash hstate (0);
3274 hstate.add_int (ref.loc);
3275 hstate.add_int (ref.uid);
3276 return hstate.end ();
3279 static bool
3280 equal (const source_location_table_entry &ref1,
3281 const source_location_table_entry &ref2)
3283 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3286 static void
3287 mark_deleted (source_location_table_entry &ref)
3289 ref.loc = UNKNOWN_LOCATION;
3290 ref.uid = -1U;
3291 ref.var = NULL_TREE;
3294 static const bool empty_zero_p = true;
3296 static void
3297 mark_empty (source_location_table_entry &ref)
3299 ref.loc = UNKNOWN_LOCATION;
3300 ref.uid = 0;
3301 ref.var = NULL_TREE;
3304 static bool
3305 is_deleted (const source_location_table_entry &ref)
3307 return (ref.loc == UNKNOWN_LOCATION
3308 && ref.uid == -1U
3309 && ref.var == NULL_TREE);
3312 static bool
3313 is_empty (const source_location_table_entry &ref)
3315 return (ref.loc == UNKNOWN_LOCATION
3316 && ref.uid == 0
3317 && ref.var == NULL_TREE);
3320 static void
3321 pch_nx (source_location_table_entry &p)
3323 extern void gt_pch_nx (source_location_table_entry &);
3324 gt_pch_nx (p);
3327 static void
3328 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3330 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3331 void *);
3332 gt_pch_nx (&p, op, cookie);
3336 static GTY(()) hash_table <source_location_table_entry_hash>
3337 *source_location_table;
3338 static GTY(()) unsigned int source_location_id;
3340 /* Fold __builtin_source_location () call. LOC is the location
3341 of the call. */
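/* E.g. (illustrative, with made-up values): the first such call in
   'int f ()' at file.cc:42:7 materializes a static variable along the
   lines of

     static const std::source_location::__impl Lsrc_loc0
       = { "file.cc", "int f()", 42, 7 };

   (name and values hypothetical) and folds to its address converted
   to 'const void *'; later calls from the same location and function
   reuse the cached VAR_DECL.  */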
3343 tree
3344 fold_builtin_source_location (location_t loc)
3346 if (source_location_impl == NULL_TREE)
3348 auto_diagnostic_group d;
3349 source_location_impl = get_source_location_impl_type (loc);
3350 if (source_location_impl == error_mark_node)
3351 inform (loc, "evaluating %qs", "__builtin_source_location");
3353 if (source_location_impl == error_mark_node)
3354 return build_zero_cst (const_ptr_type_node);
3355 if (source_location_table == NULL)
3356 source_location_table
3357 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3358 const line_map_ordinary *map;
3359 source_location_table_entry entry;
3360 entry.loc
3361 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3362 &map);
3363 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3364 entry.var = error_mark_node;
3365 source_location_table_entry *entryp
3366 = source_location_table->find_slot (entry, INSERT);
3367 tree var;
3368 if (entryp->var)
3369 var = entryp->var;
3370 else
3372 char tmp_name[32];
3373 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3374 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3375 source_location_impl);
3376 TREE_STATIC (var) = 1;
3377 TREE_PUBLIC (var) = 0;
3378 DECL_ARTIFICIAL (var) = 1;
3379 DECL_IGNORED_P (var) = 1;
3380 DECL_EXTERNAL (var) = 0;
3381 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3382 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3383 layout_decl (var, 0);
3385 vec<constructor_elt, va_gc> *v = NULL;
3386 vec_alloc (v, 4);
3387 for (tree field = TYPE_FIELDS (source_location_impl);
3388 (field = next_aggregate_field (field)) != NULL_TREE;
3389 field = DECL_CHAIN (field))
3391 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3392 tree val = NULL_TREE;
3393 if (strcmp (n, "_M_file_name") == 0)
3395 if (const char *fname = LOCATION_FILE (loc))
3397 fname = remap_macro_filename (fname);
3398 val = build_string_literal (fname);
3400 else
3401 val = build_string_literal ("");
3403 else if (strcmp (n, "_M_function_name") == 0)
3405 const char *name = "";
3407 if (current_function_decl)
3408 name = cxx_printable_name (current_function_decl, 2);
3410 val = build_string_literal (name);
3412 else if (strcmp (n, "_M_line") == 0)
3413 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3414 else if (strcmp (n, "_M_column") == 0)
3415 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3416 else
3417 gcc_unreachable ();
3418 CONSTRUCTOR_APPEND_ELT (v, field, val);
3421 tree ctor = build_constructor (source_location_impl, v);
3422 TREE_CONSTANT (ctor) = 1;
3423 TREE_STATIC (ctor) = 1;
3424 DECL_INITIAL (var) = ctor;
3425 varpool_node::finalize_decl (var);
3426 *entryp = entry;
3427 entryp->var = var;
3430 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3433 #include "gt-cp-cp-gimplify.h"