c++: fix broken conversion in coroutines
gcc/cp/cp-gimplify.cc
/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
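
/* For example (illustration only): 'if constexpr (B) f (); else g ();' is
   reduced here to just the statements of the selected arm, while a plain
   'if (b) f (); else g ();' becomes COND_EXPR <b, f (), g ()>.  */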

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
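
/* For example (illustration only): for 'A a = returns_a ();' the initializer
   is an AGGR_INIT_EXPR; rewriting its slot operand to 'a' lets the result be
   constructed directly in 'a' instead of in a temporary that would then have
   to be copied.  */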

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
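
/* For example (illustration only): given 'struct E {}; E a, b;', the
   assignment 'a = b;' is such a copy; an empty class has no data to
   transfer, so gimplification only needs to evaluate the operands for
   their side-effects.  */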

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
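
/* For example (illustration only): merely naming 'x' or '*p' has no
   side-effects as an lvalue, but 'a[f ()]' does, because the index
   expression must run before the element can be addressed.  */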

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
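
/* For example (illustration only): in 'f (i, i++)' with strong evaluation
   order, the first argument is sequenced before the second, so 'i' must be
   captured in an SSA_NAME or temporary before the increment can run.  */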

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	/* cp_fold_r expects a cp_fold_data, not a bare hash_set; passing one
	   here keeps its genericize flag initialized.  */
	cp_fold_data data (/*genericize*/true);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
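	/* For example (illustration only): in 'a[f ()] = g ();' P0145
	   requires g () to run before a[f ()] is evaluated, so g () is
	   forced into a temporary here rather than left as a CALL_EXPR
	   on the RHS.  */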
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
			 &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;
    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
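
/* For example (illustration only): a class-typed local used inside
   '#pragma omp task' with no explicit data-sharing clause becomes
   implicitly firstprivate, so its copy constructor and destructor are
   instantiated here, while template instantiation is still possible.  */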

/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (flag_exceptions
	   && TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
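
/* For example (illustration only): 'A arr[2] = { A (1), A (2) };' with a
   throwing A::A(int) is split into element-wise initializations with
   cleanups, so that already-constructed elements are destroyed if a later
   one throws.  */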

/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}

/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.  */

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case CALL_EXPR:
      if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
	if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
	    && source_location_current_p (fndecl))
	  *stmt_p = stmt = cxx_constant_value (stmt);
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL;
	}
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
	 here rather than in cp_genericize to avoid problems with the invisible
	 reference transition.  */
    case INIT_EXPR:
      if (data->genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, use it in place of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    *stmt_p = init;
	}
      break;

    default:
      break;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
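
/* For example (illustration only): a cast like '(int (*)[n]) ptr' involves
   the anonymous VLA type 'int[n]'; the DECL_EXPR built above gives
   gimplify_type_sizes a place to evaluate 'n'.  */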

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (UNLIKELY (wtd->omp_ctx != NULL)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
	if (alias != error_mark_node)
	  {
	    *stmt_p = alias;
	    TREE_USED (alias) |= TREE_USED (stmt);
	  }
	*walk_subtrees = 0;
	return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (UNLIKELY (wtd->omp_ctx != NULL)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
	 to lower this construct before scanning it, so we need to lower these
	 before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (UNLIKELY (wtd->omp_ctx != NULL))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;
    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_CONTEXT (using_directive) = current_function_decl;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				     cp_genericize_r, cp_walk_subtrees);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;
    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (warning_suppressed_p (stmt /* What warning? */))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;
    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
	      if (OMP_FOR_ORIG_DECLS (inner)
		  && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
					      i)) == TREE_LIST
		  && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						 i)))
		{
		  tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		  /* Class iterators aren't allowed on OMP_SIMD, so the only
		     case we need to solve is distribute parallel for.  */
		  gcc_assert (TREE_CODE (inner) == OMP_FOR
			      && data[1]);
		  tree orig_decl = TREE_PURPOSE (orig);
		  tree c, cl = NULL_TREE;
		  for (c = OMP_FOR_CLAUSES (inner);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			&& OMP_CLAUSE_DECL (c) == orig_decl)
		      {
			cl = c;
			break;
		      }
		  if (cl == NULL_TREE)
		    {
		      for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			   c; c = OMP_CLAUSE_CHAIN (c))
			if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			    && OMP_CLAUSE_DECL (c) == orig_decl)
			  {
			    cl = c;
			    break;
			  }
		    }
		  if (cl)
		    {
		      orig_decl = require_complete_type (orig_decl);
		      tree inner_type = TREE_TYPE (orig_decl);
		      if (orig_decl == error_mark_node)
			continue;
		      if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			inner_type = TREE_TYPE (inner_type);
		      while (TREE_CODE (inner_type) == ARRAY_TYPE)
			inner_type = TREE_TYPE (inner_type);
		      get_copy_ctor (inner_type, tf_warning_or_error);
		    }
		}
	}
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
				 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
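
/* For example (illustration only): in 'int f (int x) { if (x) return 1; }'
   the path that falls off the end is instrumented; with -fsanitize=return
   it reports a missing return at runtime, and otherwise it becomes
   __builtin_unreachable () or a trap.  */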
1890 void
1891 cp_genericize (tree fndecl)
1893 tree t;
1895 /* Fix up the types of parms passed by invisible reference. */
1896 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1897 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1899 /* If a function's arguments are copied to create a thunk,
1900 then DECL_BY_REFERENCE will be set -- but the type of the
1901 argument will be a pointer type, so we will never get
1902 here. */
1903 gcc_assert (!DECL_BY_REFERENCE (t));
1904 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1905 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1906 DECL_BY_REFERENCE (t) = 1;
1907 TREE_ADDRESSABLE (t) = 0;
1908 relayout_decl (t);
1911 /* Do the same for the return value. */
1912 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1914 t = DECL_RESULT (fndecl);
1915 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1916 DECL_BY_REFERENCE (t) = 1;
1917 TREE_ADDRESSABLE (t) = 0;
1918 relayout_decl (t);
1919 if (DECL_NAME (t))
1921 /* Adjust DECL_VALUE_EXPR of the original var. */
1922 tree outer = outer_curly_brace_block (current_function_decl);
1923 tree var;
1925 if (outer)
1926 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1927 if (VAR_P (var)
1928 && DECL_NAME (t) == DECL_NAME (var)
1929 && DECL_HAS_VALUE_EXPR_P (var)
1930 && DECL_VALUE_EXPR (var) == t)
1932 tree val = convert_from_reference (t);
1933 SET_DECL_VALUE_EXPR (var, val);
1934 break;
1939 /* If we're a clone, the body is already GIMPLE. */
1940 if (DECL_CLONED_FUNCTION_P (fndecl))
1941 return;
1943 /* Allow cp_genericize calls to be nested. */
1944 bc_state_t save_state;
1945 save_bc_state (&save_state);
1947 /* We do want to see every occurrence of the parms, so we can't just use
1948 walk_tree's hash functionality. */
1949 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1951 cp_maybe_instrument_return (fndecl);
1953 /* Do everything else. */
1954 c_genericize (fndecl);
1955 restore_bc_state (&save_state);
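/* A sketch of the invisible-reference adjustment above: for a type that
   the ABI passes by reference (its type is TREE_ADDRESSABLE), e.g.

     struct S { S (const S &); int i; };
     S g (S s) { return s; }

   the PARM_DECL for 's' (and here the RESULT_DECL too) is given reference
   type and DECL_BY_REFERENCE, so the middle end sees S& rather than S.  */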
1958 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1959 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1960 actually only takes one argument. */
1962 static tree
1963 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1965 tree defparm, parm, t;
1966 int i = 0;
1967 int nargs;
1968 tree *argarray;
1970 if (fn == NULL)
1971 return NULL;
1973 nargs = list_length (DECL_ARGUMENTS (fn));
1974 argarray = XALLOCAVEC (tree, nargs);
1976 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1977 if (arg2)
1978 defparm = TREE_CHAIN (defparm);
1980 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1981 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1983 tree inner_type = TREE_TYPE (arg1);
1984 tree start1, end1, p1;
1985 tree start2 = NULL, p2 = NULL;
1986 tree ret = NULL, lab;
1988 start1 = arg1;
1989 start2 = arg2;
1992 inner_type = TREE_TYPE (inner_type);
1993 start1 = build4 (ARRAY_REF, inner_type, start1,
1994 size_zero_node, NULL, NULL);
1995 if (arg2)
1996 start2 = build4 (ARRAY_REF, inner_type, start2,
1997 size_zero_node, NULL, NULL);
1999 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2000 start1 = build_fold_addr_expr_loc (input_location, start1);
2001 if (arg2)
2002 start2 = build_fold_addr_expr_loc (input_location, start2);
2004 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2005 end1 = fold_build_pointer_plus (start1, end1);
2007 p1 = create_tmp_var (TREE_TYPE (start1));
2008 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2009 append_to_statement_list (t, &ret);
2011 if (arg2)
2013 p2 = create_tmp_var (TREE_TYPE (start2));
2014 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2015 append_to_statement_list (t, &ret);
2018 lab = create_artificial_label (input_location);
2019 t = build1 (LABEL_EXPR, void_type_node, lab);
2020 append_to_statement_list (t, &ret);
2022 argarray[i++] = p1;
2023 if (arg2)
2024 argarray[i++] = p2;
2025 /* Handle default arguments. */
2026 for (parm = defparm; parm && parm != void_list_node;
2027 parm = TREE_CHAIN (parm), i++)
2028 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2029 TREE_PURPOSE (parm), fn,
2030 i - is_method, tf_warning_or_error);
2031 t = build_call_a (fn, i, argarray);
2032 t = fold_convert (void_type_node, t);
2033 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2034 append_to_statement_list (t, &ret);
2036 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2037 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2038 append_to_statement_list (t, &ret);
2040 if (arg2)
2042 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2043 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2044 append_to_statement_list (t, &ret);
2047 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2048 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2049 append_to_statement_list (t, &ret);
2051 return ret;
2053 else
2055 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2056 if (arg2)
2057 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2058 /* Handle default arguments. */
2059 for (parm = defparm; parm && parm != void_list_node;
2060 parm = TREE_CHAIN (parm), i++)
2061 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2062 TREE_PURPOSE (parm), fn,
2063 i - is_method, tf_warning_or_error);
2064 t = build_call_a (fn, i, argarray);
2065 t = fold_convert (void_type_node, t);
2066 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
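/* For array operands the code built above amounts to this loop (a
   simplified sketch; p1, p2 and end1 are the temporaries created above):

     p1 = &arg1[0];  p2 = &arg2[0];
     end1 = p1 + sizeof (arg1);
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;  */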
2070 /* Return code to initialize DECL with its default constructor, or
2071 NULL if there's nothing to do. */
2073 tree
2074 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2076 tree info = CP_OMP_CLAUSE_INFO (clause);
2077 tree ret = NULL;
2079 if (info)
2080 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2082 return ret;
2085 /* Return code to initialize DST with a copy constructor from SRC. */
2087 tree
2088 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2090 tree info = CP_OMP_CLAUSE_INFO (clause);
2091 tree ret = NULL;
2093 if (info)
2094 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2095 if (ret == NULL)
2096 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2098 return ret;
2101 /* Similarly, except use an assignment operator instead. */
2103 tree
2104 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2106 tree info = CP_OMP_CLAUSE_INFO (clause);
2107 tree ret = NULL;
2109 if (info)
2110 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2111 if (ret == NULL)
2112 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2114 return ret;
2117 /* Return code to destroy DECL. */
2119 tree
2120 cxx_omp_clause_dtor (tree clause, tree decl)
2122 tree info = CP_OMP_CLAUSE_INFO (clause);
2123 tree ret = NULL;
2125 if (info)
2126 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2128 return ret;
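/* The hooks above consume CP_OMP_CLAUSE_INFO, a TREE_VEC whose element 0
   is the (default or copy) constructor, element 1 the destructor and
   element 2 the assignment operator.  E.g. for

     struct S { S (); S (const S &); ~S (); };
     S s;
     // #pragma omp parallel firstprivate (s)

   the private copy is created with the copy constructor (element 0) and
   destroyed with the destructor (element 1).  */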
2131 /* True if OpenMP should privatize what this DECL points to rather
2132 than the DECL itself. */
2134 bool
2135 cxx_omp_privatize_by_reference (const_tree decl)
2137 return (TYPE_REF_P (TREE_TYPE (decl))
2138 || is_invisiref_parm (decl));
2141 /* Return true if DECL is a const-qualified variable having no mutable member. */
2142 bool
2143 cxx_omp_const_qual_no_mutable (tree decl)
2145 tree type = TREE_TYPE (decl);
2146 if (TYPE_REF_P (type))
2148 if (!is_invisiref_parm (decl))
2149 return false;
2150 type = TREE_TYPE (type);
2152 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2154 /* NRV (the named return value optimization) doesn't preserve
2155 the const qualification of the variable's type. */
2156 tree outer = outer_curly_brace_block (current_function_decl);
2157 tree var;
2159 if (outer)
2160 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2161 if (VAR_P (var)
2162 && DECL_NAME (decl) == DECL_NAME (var)
2163 && (TYPE_MAIN_VARIANT (type)
2164 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2166 if (TYPE_READONLY (TREE_TYPE (var)))
2167 type = TREE_TYPE (var);
2168 break;
2173 if (type == error_mark_node)
2174 return false;
2176 /* Variables with const-qualified type having no mutable member
2177 are predetermined shared. */
2178 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2179 return true;
2181 return false;
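/* For illustration:

     const int c = 1;                  // true: predetermined shared
     struct M { mutable int m; };
     const M cm = {};                  // false: M has a mutable member  */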
2184 /* Return OMP_CLAUSE_DEFAULT_UNSPECIFIED unless the OpenMP data-sharing
2185 attribute of DECL is predetermined. */
2187 enum omp_clause_default_kind
2188 cxx_omp_predetermined_sharing_1 (tree decl)
2190 /* Static data members are predetermined shared. */
2191 if (TREE_STATIC (decl))
2193 tree ctx = CP_DECL_CONTEXT (decl);
2194 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2195 return OMP_CLAUSE_DEFAULT_SHARED;
2197 if (c_omp_predefined_variable (decl))
2198 return OMP_CLAUSE_DEFAULT_SHARED;
2201 /* 'this' may not be specified in data-sharing clauses; still, we need
2202 to predetermine it as firstprivate. */
2203 if (decl == current_class_ptr)
2204 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2206 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2209 /* Likewise, but also include the artificial vars. We don't want to
2210 disallow mentioning the artificial vars in explicit clauses, as we
2211 use them e.g. for loop constructs with random-access iterators other
2212 than pointers, but during gimplification we do want to treat them as
2213 predetermined. */
2215 enum omp_clause_default_kind
2216 cxx_omp_predetermined_sharing (tree decl)
2218 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2219 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2220 return ret;
2222 /* Predetermine artificial variables holding integral values; those
2223 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2224 gimplification. */
2225 if (VAR_P (decl)
2226 && DECL_ARTIFICIAL (decl)
2227 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2228 && !(DECL_LANG_SPECIFIC (decl)
2229 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2230 return OMP_CLAUSE_DEFAULT_SHARED;
2232 /* Similarly for typeinfo symbols. */
2233 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2234 return OMP_CLAUSE_DEFAULT_SHARED;
2236 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2239 enum omp_clause_defaultmap_kind
2240 cxx_omp_predetermined_mapping (tree decl)
2242 /* Predetermine artificial variables holding integral values; those
2243 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2244 gimplification. */
2245 if (VAR_P (decl)
2246 && DECL_ARTIFICIAL (decl)
2247 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2248 && !(DECL_LANG_SPECIFIC (decl)
2249 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2250 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2252 if (c_omp_predefined_variable (decl))
2253 return OMP_CLAUSE_DEFAULTMAP_TO;
2255 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2258 /* Finalize an implicitly determined clause. */
2260 void
2261 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2263 tree decl, inner_type;
2264 bool make_shared = false;
2266 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2267 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2268 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2269 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2270 return;
2272 decl = OMP_CLAUSE_DECL (c);
2273 decl = require_complete_type (decl);
2274 inner_type = TREE_TYPE (decl);
2275 if (decl == error_mark_node)
2276 make_shared = true;
2277 else if (TYPE_REF_P (TREE_TYPE (decl)))
2278 inner_type = TREE_TYPE (inner_type);
2280 /* We're interested in the base element, not arrays. */
2281 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2282 inner_type = TREE_TYPE (inner_type);
2284 /* Check for special function availability by building a call to one.
2285 Save the results, because later we won't be in the right context
2286 for making these queries. */
2287 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2288 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2289 if (!make_shared
2290 && CLASS_TYPE_P (inner_type)
2291 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2292 true))
2293 make_shared = true;
2295 if (make_shared)
2297 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2298 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2299 OMP_CLAUSE_SHARED_READONLY (c) = 0;
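/* E.g. for an implicitly determined lastprivate loop iterator of class
   type (a sketch; the names are illustrative):

     I it;
     // #pragma omp parallel for lastprivate (it), with I a random-access
     // iterator class

   this looks up and saves I's default constructor and assignment operator
   now, while we are still in the right context for those queries.  */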
2303 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2304 disregarded in an OpenMP construct, because it is going to be
2305 remapped during OpenMP lowering. SHARED is true if DECL
2306 is going to be shared, false if it is going to be privatized. */
2308 bool
2309 cxx_omp_disregard_value_expr (tree decl, bool shared)
2311 if (shared)
2312 return false;
2313 if (VAR_P (decl)
2314 && DECL_HAS_VALUE_EXPR_P (decl)
2315 && DECL_ARTIFICIAL (decl)
2316 && DECL_LANG_SPECIFIC (decl)
2317 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2318 return true;
2319 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2320 return true;
2321 return false;
2324 /* Fold expression X which is used as an rvalue if RVAL is true. */
2326 tree
2327 cp_fold_maybe_rvalue (tree x, bool rval)
2329 while (true)
2331 x = cp_fold (x);
2332 if (rval)
2333 x = mark_rvalue_use (x);
2334 if (rval && DECL_P (x)
2335 && !TYPE_REF_P (TREE_TYPE (x)))
2337 tree v = decl_constant_value (x);
2338 if (v != x && v != error_mark_node)
2340 x = v;
2341 continue;
2344 break;
2346 return x;
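/* E.g., when RVAL is true and X is a const variable with a known
   initializer,

     const int n = 42;
     // ... n used as an rvalue ...

   decl_constant_value replaces X by 42 and the loop folds the result
   again.  */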
2349 /* Fold expression X which is used as an rvalue. */
2351 tree
2352 cp_fold_rvalue (tree x)
2354 return cp_fold_maybe_rvalue (x, true);
2357 /* Perform folding on expression X. */
2359 tree
2360 cp_fully_fold (tree x)
2362 if (processing_template_decl)
2363 return x;
2364 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2365 have to call both. */
2366 if (cxx_dialect >= cxx11)
2368 x = maybe_constant_value (x);
2369 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2370 a TARGET_EXPR; undo that here. */
2371 if (TREE_CODE (x) == TARGET_EXPR)
2372 x = TARGET_EXPR_INITIAL (x);
2373 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2374 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2375 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2376 x = TREE_OPERAND (x, 0);
2378 return cp_fold_rvalue (x);
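/* A simplified example: in C++11 and later, given

     constexpr int sq (int i) { return i * i; }

   an expression like sq (3) + 1 is reduced to a constant here, first by
   maybe_constant_value and then by cp_fold_rvalue.  */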
2381 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2382 in some cases. */
2384 tree
2385 cp_fully_fold_init (tree x)
2387 if (processing_template_decl)
2388 return x;
2389 x = cp_fully_fold (x);
2390 cp_fold_data data (/*genericize*/false);
2391 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2392 return x;
2395 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2396 and certain changes are made to the folding done. Or should be (FIXME). We
2397 never touch maybe_const, as it is only used for the C front-end
2398 C_MAYBE_CONST_EXPR. */
2400 tree
2401 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2403 return cp_fold_maybe_rvalue (x, !lval);
2406 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2408 /* Dispose of the whole FOLD_CACHE. */
2410 void
2411 clear_fold_cache (void)
2413 if (fold_cache != NULL)
2414 fold_cache->empty ();
2417 /* This function tries to fold an expression X.
2418 To avoid combinatorial explosion, folding results are kept in fold_cache.
2419 If X is invalid, we don't fold at all.
2420 For performance reasons we don't cache expressions representing a
2421 declaration or constant.
2422 Returns X or its folded variant. */
2424 static tree
2425 cp_fold (tree x)
2427 tree op0, op1, op2, op3;
2428 tree org_x = x, r = NULL_TREE;
2429 enum tree_code code;
2430 location_t loc;
2431 bool rval_ops = true;
2433 if (!x || x == error_mark_node)
2434 return x;
2436 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2437 return x;
2439 /* Don't bother to cache DECLs or constants. */
2440 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2441 return x;
2443 if (fold_cache == NULL)
2444 fold_cache = hash_map<tree, tree>::create_ggc (101);
2446 if (tree *cached = fold_cache->get (x))
2447 return *cached;
2449 uid_sensitive_constexpr_evaluation_checker c;
2451 code = TREE_CODE (x);
2452 switch (code)
2454 case CLEANUP_POINT_EXPR:
2455 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2456 effects. */
2457 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2458 if (!TREE_SIDE_EFFECTS (r))
2459 x = r;
2460 break;
2462 case SIZEOF_EXPR:
2463 x = fold_sizeof_expr (x);
2464 break;
2466 case VIEW_CONVERT_EXPR:
2467 rval_ops = false;
2468 /* FALLTHRU */
2469 case NON_LVALUE_EXPR:
2470 CASE_CONVERT:
2472 if (VOID_TYPE_P (TREE_TYPE (x)))
2474 /* This is just to make sure we don't end up with casts to
2475 void from error_mark_node. If we just return x, then
2476 cp_fold_r might fold the operand into error_mark_node and
2477 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2478 during gimplification doesn't like such casts.
2479 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2480 folding of the operand should already be in the caches, and cp_fold_r
2481 will modify the operand in place if it needs to. */
2482 op0 = cp_fold (TREE_OPERAND (x, 0));
2483 if (op0 == error_mark_node)
2484 x = error_mark_node;
2485 break;
2488 loc = EXPR_LOCATION (x);
2489 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2491 if (code == CONVERT_EXPR
2492 && SCALAR_TYPE_P (TREE_TYPE (x))
2493 && op0 != void_node)
2494 /* During parsing we used convert_to_*_nofold; re-convert now using the
2495 folding variants, since fold() doesn't do those transformations. */
2496 x = fold (convert (TREE_TYPE (x), op0));
2497 else if (op0 != TREE_OPERAND (x, 0))
2499 if (op0 == error_mark_node)
2500 x = error_mark_node;
2501 else
2502 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2504 else
2505 x = fold (x);
2507 /* Conversion of an out-of-range value has implementation-defined
2508 behavior; the language considers it different from arithmetic
2509 overflow, which is undefined. */
2510 if (TREE_CODE (op0) == INTEGER_CST
2511 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2512 TREE_OVERFLOW (x) = false;
2514 break;
2516 case INDIRECT_REF:
2517 /* We don't need the decltype(auto) obfuscation anymore. */
2518 if (REF_PARENTHESIZED_P (x))
2520 tree p = maybe_undo_parenthesized_ref (x);
2521 if (p != x)
2522 return cp_fold (p);
2524 goto unary;
2526 case ADDR_EXPR:
2527 loc = EXPR_LOCATION (x);
2528 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2530 /* Cope with user tricks that amount to offsetof. */
2531 if (op0 != error_mark_node
2532 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2534 tree val = get_base_address (op0);
2535 if (val
2536 && INDIRECT_REF_P (val)
2537 && COMPLETE_TYPE_P (TREE_TYPE (val))
2538 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2540 val = TREE_OPERAND (val, 0);
2541 STRIP_NOPS (val);
2542 val = maybe_constant_value (val);
2543 if (TREE_CODE (val) == INTEGER_CST)
2544 return fold_offsetof (op0, TREE_TYPE (x));
2547 goto finish_unary;
2549 case REALPART_EXPR:
2550 case IMAGPART_EXPR:
2551 rval_ops = false;
2552 /* FALLTHRU */
2553 case CONJ_EXPR:
2554 case FIX_TRUNC_EXPR:
2555 case FLOAT_EXPR:
2556 case NEGATE_EXPR:
2557 case ABS_EXPR:
2558 case ABSU_EXPR:
2559 case BIT_NOT_EXPR:
2560 case TRUTH_NOT_EXPR:
2561 case FIXED_CONVERT_EXPR:
2562 unary:
2564 loc = EXPR_LOCATION (x);
2565 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2567 finish_unary:
2568 if (op0 != TREE_OPERAND (x, 0))
2570 if (op0 == error_mark_node)
2571 x = error_mark_node;
2572 else
2574 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2575 if (code == INDIRECT_REF
2576 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2578 TREE_READONLY (x) = TREE_READONLY (org_x);
2579 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2580 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2584 else
2585 x = fold (x);
2587 gcc_assert (TREE_CODE (x) != COND_EXPR
2588 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2589 break;
2591 case UNARY_PLUS_EXPR:
2592 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2593 if (op0 == error_mark_node)
2594 x = error_mark_node;
2595 else
2596 x = fold_convert (TREE_TYPE (x), op0);
2597 break;
2599 case POSTDECREMENT_EXPR:
2600 case POSTINCREMENT_EXPR:
2601 case INIT_EXPR:
2602 case PREDECREMENT_EXPR:
2603 case PREINCREMENT_EXPR:
2604 case COMPOUND_EXPR:
2605 case MODIFY_EXPR:
2606 rval_ops = false;
2607 /* FALLTHRU */
2608 case POINTER_PLUS_EXPR:
2609 case PLUS_EXPR:
2610 case POINTER_DIFF_EXPR:
2611 case MINUS_EXPR:
2612 case MULT_EXPR:
2613 case TRUNC_DIV_EXPR:
2614 case CEIL_DIV_EXPR:
2615 case FLOOR_DIV_EXPR:
2616 case ROUND_DIV_EXPR:
2617 case TRUNC_MOD_EXPR:
2618 case CEIL_MOD_EXPR:
2619 case ROUND_MOD_EXPR:
2620 case RDIV_EXPR:
2621 case EXACT_DIV_EXPR:
2622 case MIN_EXPR:
2623 case MAX_EXPR:
2624 case LSHIFT_EXPR:
2625 case RSHIFT_EXPR:
2626 case LROTATE_EXPR:
2627 case RROTATE_EXPR:
2628 case BIT_AND_EXPR:
2629 case BIT_IOR_EXPR:
2630 case BIT_XOR_EXPR:
2631 case TRUTH_AND_EXPR:
2632 case TRUTH_ANDIF_EXPR:
2633 case TRUTH_OR_EXPR:
2634 case TRUTH_ORIF_EXPR:
2635 case TRUTH_XOR_EXPR:
2636 case LT_EXPR: case LE_EXPR:
2637 case GT_EXPR: case GE_EXPR:
2638 case EQ_EXPR: case NE_EXPR:
2639 case UNORDERED_EXPR: case ORDERED_EXPR:
2640 case UNLT_EXPR: case UNLE_EXPR:
2641 case UNGT_EXPR: case UNGE_EXPR:
2642 case UNEQ_EXPR: case LTGT_EXPR:
2643 case RANGE_EXPR: case COMPLEX_EXPR:
2645 loc = EXPR_LOCATION (x);
2646 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2647 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2649 /* decltype(nullptr) has only one value, so optimize away all comparisons
2650 with that type right away; keeping them in the IL causes trouble for
2651 various optimizations. */
2652 if (COMPARISON_CLASS_P (org_x)
2653 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2654 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2656 switch (code)
2658 case EQ_EXPR:
2659 x = constant_boolean_node (true, TREE_TYPE (x));
2660 break;
2661 case NE_EXPR:
2662 x = constant_boolean_node (false, TREE_TYPE (x));
2663 break;
2664 default:
2665 gcc_unreachable ();
2667 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2668 op0, op1);
2671 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2673 if (op0 == error_mark_node || op1 == error_mark_node)
2674 x = error_mark_node;
2675 else
2676 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2678 else
2679 x = fold (x);
2681 /* This is only needed for -Wnonnull-compare and only if
2682 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2683 generation, we always do it. */
2684 if (COMPARISON_CLASS_P (org_x))
2686 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2688 else if (COMPARISON_CLASS_P (x))
2690 if (warn_nonnull_compare
2691 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2692 suppress_warning (x, OPT_Wnonnull_compare);
2694 /* Otherwise give up on optimizing these; let the GIMPLE folders
2695 optimize them later on. */
2696 else if (op0 != TREE_OPERAND (org_x, 0)
2697 || op1 != TREE_OPERAND (org_x, 1))
2699 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2700 if (warn_nonnull_compare
2701 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2702 suppress_warning (x, OPT_Wnonnull_compare);
2704 else
2705 x = org_x;
2708 break;
2710 case VEC_COND_EXPR:
2711 case COND_EXPR:
2712 loc = EXPR_LOCATION (x);
2713 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2714 op1 = cp_fold (TREE_OPERAND (x, 1));
2715 op2 = cp_fold (TREE_OPERAND (x, 2));
2717 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2719 warning_sentinel s (warn_int_in_bool_context);
2720 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2721 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2722 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2723 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2725 else if (VOID_TYPE_P (TREE_TYPE (x)))
2727 if (TREE_CODE (op0) == INTEGER_CST)
2729 /* If the condition is constant, fold can fold away
2730 the COND_EXPR. Some statement-level uses of COND_EXPR have
2731 one of the branches NULL; avoid crashing in fold in that case. */
2732 if (!op1)
2733 op1 = build_empty_stmt (loc);
2734 if (!op2)
2735 op2 = build_empty_stmt (loc);
2737 else
2739 /* Otherwise, don't bother folding a void condition, since
2740 it can't produce a constant value. */
2741 if (op0 != TREE_OPERAND (x, 0)
2742 || op1 != TREE_OPERAND (x, 1)
2743 || op2 != TREE_OPERAND (x, 2))
2744 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2745 break;
2749 if (op0 != TREE_OPERAND (x, 0)
2750 || op1 != TREE_OPERAND (x, 1)
2751 || op2 != TREE_OPERAND (x, 2))
2753 if (op0 == error_mark_node
2754 || op1 == error_mark_node
2755 || op2 == error_mark_node)
2756 x = error_mark_node;
2757 else
2758 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2760 else
2761 x = fold (x);
2763 /* A COND_EXPR might have incompatible types in branches if one or both
2764 arms are bitfields. If folding exposed such a branch, fix it up. */
2765 if (TREE_CODE (x) != code
2766 && x != error_mark_node
2767 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2768 x = fold_convert (TREE_TYPE (org_x), x);
2770 break;
2772 case CALL_EXPR:
2774 tree callee = get_callee_fndecl (x);
2776 /* "Inline" calls to std::move/forward and other cast-like functions
2777 by simply folding them into a corresponding cast to their return
2778 type. This is cheaper than relying on the middle end to do so, and
2779 also means we avoid generating useless debug info for them at all.
2781 At this point the argument has already been converted into a
2782 reference, so it suffices to use a NOP_EXPR to express the
2783 cast. */
2784 if ((OPTION_SET_P (flag_fold_simple_inlines)
2785 ? flag_fold_simple_inlines
2786 : !flag_no_inline)
2787 && call_expr_nargs (x) == 1
2788 && decl_in_std_namespace_p (callee)
2789 && DECL_NAME (callee) != NULL_TREE
2790 && (id_equal (DECL_NAME (callee), "move")
2791 || id_equal (DECL_NAME (callee), "forward")
2792 || id_equal (DECL_NAME (callee), "addressof")
2793 /* This addressof equivalent is used heavily in libstdc++. */
2794 || id_equal (DECL_NAME (callee), "__addressof")
2795 || id_equal (DECL_NAME (callee), "as_const")))
2797 r = CALL_EXPR_ARG (x, 0);
2798 /* Check that the return and argument types are sane before
2799 folding. */
2800 if (INDIRECT_TYPE_P (TREE_TYPE (x))
2801 && INDIRECT_TYPE_P (TREE_TYPE (r)))
2803 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2804 r = build_nop (TREE_TYPE (x), r);
2805 x = cp_fold (r);
2806 break;
2810 int sv = optimize, nw = sv;
2812 /* Some built-in function calls will be evaluated at compile-time in
2813 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2814 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2815 if (callee && fndecl_built_in_p (callee) && !optimize
2816 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2817 && current_function_decl
2818 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2819 nw = 1;
2821 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2823 switch (DECL_FE_FUNCTION_CODE (callee))
2825 /* Defer folding __builtin_is_constant_evaluated. */
2826 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2827 break;
2828 case CP_BUILT_IN_SOURCE_LOCATION:
2829 x = fold_builtin_source_location (EXPR_LOCATION (x));
2830 break;
2831 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2832 x = fold_builtin_is_corresponding_member
2833 (EXPR_LOCATION (x), call_expr_nargs (x),
2834 &CALL_EXPR_ARG (x, 0));
2835 break;
2836 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2837 x = fold_builtin_is_pointer_inverconvertible_with_class
2838 (EXPR_LOCATION (x), call_expr_nargs (x),
2839 &CALL_EXPR_ARG (x, 0));
2840 break;
2841 default:
2842 break;
2844 break;
2847 if (callee
2848 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2849 BUILT_IN_FRONTEND))
2851 x = fold_builtin_source_location (EXPR_LOCATION (x));
2852 break;
2855 bool changed = false;
2856 int m = call_expr_nargs (x);
2857 for (int i = 0; i < m; i++)
2859 r = cp_fold (CALL_EXPR_ARG (x, i));
2860 if (r != CALL_EXPR_ARG (x, i))
2862 if (r == error_mark_node)
2864 x = error_mark_node;
2865 break;
2867 if (!changed)
2868 x = copy_node (x);
2869 CALL_EXPR_ARG (x, i) = r;
2870 changed = true;
2873 if (x == error_mark_node)
2874 break;
2876 optimize = nw;
2877 r = fold (x);
2878 optimize = sv;
2880 if (TREE_CODE (r) != CALL_EXPR)
2882 x = cp_fold (r);
2883 break;
2886 optimize = nw;
2888 /* Invoke maybe_constant_value for functions declared
2889 constexpr and not called with AGGR_INIT_EXPRs.
2890 TODO:
2891 Do constexpr expansion of expressions where the call itself is not
2892 constant, but the call followed by an INDIRECT_REF is. */
2893 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2894 && !flag_no_inline)
2895 r = maybe_constant_value (x);
2896 optimize = sv;
2898 if (TREE_CODE (r) != CALL_EXPR)
2900 if (DECL_CONSTRUCTOR_P (callee))
2902 loc = EXPR_LOCATION (x);
2903 tree s = build_fold_indirect_ref_loc (loc,
2904 CALL_EXPR_ARG (x, 0));
2905 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2907 x = r;
2908 break;
2911 break;
2914 case CONSTRUCTOR:
2916 unsigned i;
2917 constructor_elt *p;
2918 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2919 vec<constructor_elt, va_gc> *nelts = NULL;
2920 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2922 tree op = cp_fold (p->value);
2923 if (op != p->value)
2925 if (op == error_mark_node)
2927 x = error_mark_node;
2928 vec_free (nelts);
2929 break;
2931 if (nelts == NULL)
2932 nelts = elts->copy ();
2933 (*nelts)[i].value = op;
2936 if (nelts)
2938 x = build_constructor (TREE_TYPE (x), nelts);
2939 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2940 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2942 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2943 x = fold (x);
2944 break;
2946 case TREE_VEC:
2948 bool changed = false;
2949 int n = TREE_VEC_LENGTH (x);
2951 for (int i = 0; i < n; i++)
2953 tree op = cp_fold (TREE_VEC_ELT (x, i));
2954 if (op != TREE_VEC_ELT (x, i))
2956 if (!changed)
2957 x = copy_node (x);
2958 TREE_VEC_ELT (x, i) = op;
2959 changed = true;
2964 break;
2966 case ARRAY_REF:
2967 case ARRAY_RANGE_REF:
2969 loc = EXPR_LOCATION (x);
2970 op0 = cp_fold (TREE_OPERAND (x, 0));
2971 op1 = cp_fold (TREE_OPERAND (x, 1));
2972 op2 = cp_fold (TREE_OPERAND (x, 2));
2973 op3 = cp_fold (TREE_OPERAND (x, 3));
2975 if (op0 != TREE_OPERAND (x, 0)
2976 || op1 != TREE_OPERAND (x, 1)
2977 || op2 != TREE_OPERAND (x, 2)
2978 || op3 != TREE_OPERAND (x, 3))
2980 if (op0 == error_mark_node
2981 || op1 == error_mark_node
2982 || op2 == error_mark_node
2983 || op3 == error_mark_node)
2984 x = error_mark_node;
2985 else
2987 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2988 TREE_READONLY (x) = TREE_READONLY (org_x);
2989 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2990 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2994 x = fold (x);
2995 break;
2997 case SAVE_EXPR:
2998 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2999 folding, evaluates to an invariant. In that case there is no need
3000 to wrap the folded tree in a SAVE_EXPR. */
3001 r = cp_fold (TREE_OPERAND (x, 0));
3002 if (tree_invariant_p (r))
3003 x = r;
3004 break;
3006 case REQUIRES_EXPR:
3007 x = evaluate_requires_expr (x);
3008 break;
3010 default:
3011 return org_x;
3014 if (EXPR_P (x) && TREE_CODE (x) == code)
3016 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3017 copy_warning (x, org_x);
3020 if (!c.evaluation_restricted_p ())
3022 fold_cache->put (org_x, x);
3023 /* Avoid trying to fold an already-folded result again. */
3024 if (x != org_x)
3025 fold_cache->put (x, x);
3028 return x;
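/* To illustrate the CALL_EXPR handling above: a call such as std::move (v),
   whose argument has already been converted to a reference, is folded into
   a plain cast,

     std::move (v)   -->   static_cast<T &&> (v)   // a NOP_EXPR

   avoiding both a real call and useless debug info for it.  */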
3031 /* Look up either "hot" or "cold" in attribute list LIST. */
3033 tree
3034 lookup_hotness_attribute (tree list)
3036 for (; list; list = TREE_CHAIN (list))
3038 tree name = get_attribute_name (list);
3039 if (is_attribute_p ("hot", name)
3040 || is_attribute_p ("cold", name)
3041 || is_attribute_p ("likely", name)
3042 || is_attribute_p ("unlikely", name))
3043 break;
3045 return list;
3048 /* Remove both "hot" and "cold" attributes from LIST. */
3050 static tree
3051 remove_hotness_attribute (tree list)
3053 list = remove_attribute ("hot", list);
3054 list = remove_attribute ("cold", list);
3055 list = remove_attribute ("likely", list);
3056 list = remove_attribute ("unlikely", list);
3057 return list;
3060 /* If [[likely]] or [[unlikely]] appears on this statement, turn it into a
3061 PREDICT_EXPR. */
3063 tree
3064 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3066 if (std_attrs == error_mark_node)
3067 return std_attrs;
3068 if (tree attr = lookup_hotness_attribute (std_attrs))
3070 tree name = get_attribute_name (attr);
3071 bool hot = (is_attribute_p ("hot", name)
3072 || is_attribute_p ("likely", name));
3073 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3074 hot ? TAKEN : NOT_TAKEN);
3075 SET_EXPR_LOCATION (pred, attrs_loc);
3076 add_stmt (pred);
3077 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3078 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3079 get_attribute_name (other), name);
3080 std_attrs = remove_hotness_attribute (std_attrs);
3082 return std_attrs;
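/* E.g. (a sketch):

     if (x)
       [[likely]] f ();

   adds PREDICT_EXPR <PRED_HOT_LABEL, TAKEN> in front of the call, steering
   branch prediction much as __builtin_expect does.  */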
3085 /* If [[assume (cond)]] appears on this statement, handle it. */
3087 tree
3088 process_stmt_assume_attribute (tree std_attrs, tree statement,
3089 location_t attrs_loc)
3091 if (std_attrs == error_mark_node)
3092 return std_attrs;
3093 tree attr = lookup_attribute ("gnu", "assume", std_attrs);
3094 if (!attr)
3095 return std_attrs;
3096 /* The next token after the assume attribute is not ';'. */
3097 if (statement)
3099 warning_at (attrs_loc, OPT_Wattributes,
3100 "%<assume%> attribute not followed by %<;%>");
3101 attr = NULL_TREE;
3103 for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
3105 tree args = TREE_VALUE (attr);
3106 int nargs = list_length (args);
3107 if (nargs != 1)
3109 auto_diagnostic_group d;
3110 error_at (attrs_loc, "wrong number of arguments specified for "
3111 "%qE attribute", get_attribute_name (attr));
3112 inform (attrs_loc, "expected %i, found %i", 1, nargs);
3114 else
3116 tree arg = TREE_VALUE (args);
3117 if (!type_dependent_expression_p (arg))
3118 arg = contextual_conv_bool (arg, tf_warning_or_error);
3119 if (error_operand_p (arg))
3120 continue;
3121 statement = build_call_expr_internal_loc (attrs_loc, IFN_ASSUME,
3122 void_type_node, 1, arg);
3123 finish_expr_stmt (statement);
3126 return remove_attribute ("gnu", "assume", std_attrs);
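/* E.g. (a sketch):

     [[assume (n > 0)]];

   is lowered, after contextually converting the condition to bool, to a
   call to the internal function IFN_ASSUME:

     .ASSUME (n > 0);  */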
3129 /* Helper of fold_builtin_source_location: return the
3130 std::source_location::__impl type after verifying it.
3131 LOC is used for reporting any errors. */
3133 static tree
3134 get_source_location_impl_type (location_t loc)
3136 tree name = get_identifier ("source_location");
3137 tree decl = lookup_qualified_name (std_node, name);
3138 if (TREE_CODE (decl) != TYPE_DECL)
3140 auto_diagnostic_group d;
3141 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3142 qualified_name_lookup_error (std_node, name, decl, loc);
3143 else
3144 error_at (loc, "%qD is not a type", decl);
3145 return error_mark_node;
3147 name = get_identifier ("__impl");
3148 tree type = TREE_TYPE (decl);
3149 decl = lookup_qualified_name (type, name);
3150 if (TREE_CODE (decl) != TYPE_DECL)
3152 auto_diagnostic_group d;
3153 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3154 qualified_name_lookup_error (type, name, decl, loc);
3155 else
3156 error_at (loc, "%qD is not a type", decl);
3157 return error_mark_node;
3159 type = TREE_TYPE (decl);
3160 if (TREE_CODE (type) != RECORD_TYPE)
3162 error_at (loc, "%qD is not a class type", decl);
3163 return error_mark_node;
3166 int cnt = 0;
3167 for (tree field = TYPE_FIELDS (type);
3168 (field = next_aggregate_field (field)) != NULL_TREE;
3169 field = DECL_CHAIN (field))
3171 if (DECL_NAME (field) != NULL_TREE)
3173 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3174 if (strcmp (n, "_M_file_name") == 0
3175 || strcmp (n, "_M_function_name") == 0)
3177 if (TREE_TYPE (field) != const_string_type_node)
3179 error_at (loc, "%qD does not have %<const char *%> type",
3180 field);
3181 return error_mark_node;
3183 cnt++;
3184 continue;
3186 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3188 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3190 error_at (loc, "%qD does not have integral type", field);
3191 return error_mark_node;
3193 cnt++;
3194 continue;
3197 cnt = 0;
3198 break;
3200 if (cnt != 4)
3202 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3203 "non-static data members %<_M_file_name%>, "
3204 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3205 return error_mark_node;
3207 return build_qualified_type (type, TYPE_QUAL_CONST);
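/* The verified type is expected to look like this sketch (the exact
   integral types of the last two members may vary):

     struct __impl
     {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line;
       unsigned _M_column;
     };  */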
3210 /* Type for source_location_table hash_set. */
3211 struct GTY((for_user)) source_location_table_entry {
3212 location_t loc;
3213 unsigned uid;
3214 tree var;
3217 /* Traits class for the source_location_table hash table below. */
3219 struct source_location_table_entry_hash
3220 : ggc_remove <source_location_table_entry>
3222 typedef source_location_table_entry value_type;
3223 typedef source_location_table_entry compare_type;
3225 static hashval_t
3226 hash (const source_location_table_entry &ref)
3228 inchash::hash hstate (0);
3229 hstate.add_int (ref.loc);
3230 hstate.add_int (ref.uid);
3231 return hstate.end ();
3234 static bool
3235 equal (const source_location_table_entry &ref1,
3236 const source_location_table_entry &ref2)
3238 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3241 static void
3242 mark_deleted (source_location_table_entry &ref)
3244 ref.loc = UNKNOWN_LOCATION;
3245 ref.uid = -1U;
3246 ref.var = NULL_TREE;
3249 static const bool empty_zero_p = true;
3251 static void
3252 mark_empty (source_location_table_entry &ref)
3254 ref.loc = UNKNOWN_LOCATION;
3255 ref.uid = 0;
3256 ref.var = NULL_TREE;
3259 static bool
3260 is_deleted (const source_location_table_entry &ref)
3262 return (ref.loc == UNKNOWN_LOCATION
3263 && ref.uid == -1U
3264 && ref.var == NULL_TREE);
3267 static bool
3268 is_empty (const source_location_table_entry &ref)
3270 return (ref.loc == UNKNOWN_LOCATION
3271 && ref.uid == 0
3272 && ref.var == NULL_TREE);
3275 static void
3276 pch_nx (source_location_table_entry &p)
3278 extern void gt_pch_nx (source_location_table_entry &);
3279 gt_pch_nx (p);
3282 static void
3283 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3285 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3286 void *);
3287 gt_pch_nx (&p, op, cookie);
3291 static GTY(()) hash_table <source_location_table_entry_hash>
3292 *source_location_table;
3293 static GTY(()) unsigned int source_location_id;
3295 /* Fold __builtin_source_location () call. LOC is the location
3296 of the call. */
3298 tree
3299 fold_builtin_source_location (location_t loc)
3301 if (source_location_impl == NULL_TREE)
3303 auto_diagnostic_group d;
3304 source_location_impl = get_source_location_impl_type (loc);
3305 if (source_location_impl == error_mark_node)
3306 inform (loc, "evaluating %qs", "__builtin_source_location");
3308 if (source_location_impl == error_mark_node)
3309 return build_zero_cst (const_ptr_type_node);
3310 if (source_location_table == NULL)
3311 source_location_table
3312 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3313 const line_map_ordinary *map;
3314 source_location_table_entry entry;
3315 entry.loc
3316 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3317 &map);
3318 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3319 entry.var = error_mark_node;
3320 source_location_table_entry *entryp
3321 = source_location_table->find_slot (entry, INSERT);
3322 tree var;
3323 if (entryp->var)
3324 var = entryp->var;
3325 else
3327 char tmp_name[32];
3328 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3329 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3330 source_location_impl);
3331 TREE_STATIC (var) = 1;
3332 TREE_PUBLIC (var) = 0;
3333 DECL_ARTIFICIAL (var) = 1;
3334 DECL_IGNORED_P (var) = 1;
3335 DECL_EXTERNAL (var) = 0;
3336 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3337 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3338 layout_decl (var, 0);
3340 vec<constructor_elt, va_gc> *v = NULL;
3341 vec_alloc (v, 4);
3342 for (tree field = TYPE_FIELDS (source_location_impl);
3343 (field = next_aggregate_field (field)) != NULL_TREE;
3344 field = DECL_CHAIN (field))
3346 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3347 tree val = NULL_TREE;
3348 if (strcmp (n, "_M_file_name") == 0)
3350 if (const char *fname = LOCATION_FILE (loc))
3352 fname = remap_macro_filename (fname);
3353 val = build_string_literal (strlen (fname) + 1, fname);
3355 else
3356 val = build_string_literal (1, "");
3358 else if (strcmp (n, "_M_function_name") == 0)
3360 const char *name = "";
3362 if (current_function_decl)
3363 name = cxx_printable_name (current_function_decl, 2);
3365 val = build_string_literal (strlen (name) + 1, name);
3367 else if (strcmp (n, "_M_line") == 0)
3368 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3369 else if (strcmp (n, "_M_column") == 0)
3370 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3371 else
3372 gcc_unreachable ();
3373 CONSTRUCTOR_APPEND_ELT (v, field, val);
3376 tree ctor = build_constructor (source_location_impl, v);
3377 TREE_CONSTANT (ctor) = 1;
3378 TREE_STATIC (ctor) = 1;
3379 DECL_INITIAL (var) = ctor;
3380 varpool_node::finalize_decl (var);
3381 *entryp = entry;
3382 entryp->var = var;
3385 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
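/* E.g., a __builtin_source_location () call (which underlies
   std::source_location::current ()) folds to the address of a static
   constexpr variable roughly like (a sketch; label and values vary)

     static const std::source_location::__impl _Lsrc_loc0
       = { "file.cc", "int f()", 42, 7 };

   with one such variable shared per (location, function) pair.  */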
3388 #include "gt-cp-cp-gimplify.h"