/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"
#include "cp-cilkplus.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
40 /* Forward declarations. */
42 static tree
cp_genericize_r (tree
*, int *, void *);
43 static tree
cp_fold_r (tree
*, int *, void *);
44 static void cp_genericize_tree (tree
*, bool);
45 static tree
cp_fold (tree
);
47 /* Local declarations. */
49 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
51 /* Stack of labels which are targets for "break" or "continue",
52 linked through TREE_CHAIN. */
53 static tree bc_label
[2];
55 /* Begin a scope which can be exited by a break or continue statement. BC
58 Just creates a label with location LOCATION and pushes it into the current
62 begin_bc_block (enum bc_t bc
, location_t location
)
64 tree label
= create_artificial_label (location
);
65 DECL_CHAIN (label
) = bc_label
[bc
];
68 LABEL_DECL_BREAK (label
) = true;
70 LABEL_DECL_CONTINUE (label
) = true;
74 /* Finish a scope which can be exited by a break or continue statement.
75 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
76 an expression for the contents of the scope.
78 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
79 BLOCK. Otherwise, just forget the label. */
82 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
84 gcc_assert (label
== bc_label
[bc
]);
86 if (TREE_USED (label
))
87 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
90 bc_label
[bc
] = DECL_CHAIN (label
);
91 DECL_CHAIN (label
) = NULL_TREE
;
94 /* Get the LABEL_EXPR to represent a break or continue statement
95 in the current block scope. BC indicates which. */
98 get_bc_label (enum bc_t bc
)
100 tree label
= bc_label
[bc
];
102 /* Mark the label used for finish_bc_block. */
103 TREE_USED (label
) = 1;
107 /* Genericize a TRY_BLOCK. */
110 genericize_try_block (tree
*stmt_p
)
112 tree body
= TRY_STMTS (*stmt_p
);
113 tree cleanup
= TRY_HANDLERS (*stmt_p
);
115 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
118 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
121 genericize_catch_block (tree
*stmt_p
)
123 tree type
= HANDLER_TYPE (*stmt_p
);
124 tree body
= HANDLER_BODY (*stmt_p
);
126 /* FIXME should the caught type go in TREE_TYPE? */
127 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
130 /* A terser interface for building a representation of an exception
134 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
138 /* FIXME should the allowed types go in TREE_TYPE? */
139 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
140 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
142 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
143 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
148 /* Genericize an EH_SPEC_BLOCK by converting it to a
149 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
152 genericize_eh_spec_block (tree
*stmt_p
)
154 tree body
= EH_SPEC_STMTS (*stmt_p
);
155 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
156 tree failure
= build_call_n (call_unexpected_fn
, 1, build_exc_ptr ());
158 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
159 TREE_NO_WARNING (*stmt_p
) = true;
160 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
163 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
166 genericize_if_stmt (tree
*stmt_p
)
168 tree stmt
, cond
, then_
, else_
;
169 location_t locus
= EXPR_LOCATION (*stmt_p
);
172 cond
= IF_COND (stmt
);
173 then_
= THEN_CLAUSE (stmt
);
174 else_
= ELSE_CLAUSE (stmt
);
177 then_
= build_empty_stmt (locus
);
179 else_
= build_empty_stmt (locus
);
181 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
183 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
186 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
187 if (!EXPR_HAS_LOCATION (stmt
))
188 protected_set_expr_location (stmt
, locus
);
192 /* Build a generic representation of one of the C loop forms. COND is the
193 loop condition or NULL_TREE. BODY is the (possibly compound) statement
194 controlled by the loop. INCR is the increment expression of a for-loop,
195 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
196 evaluated before the loop body as in while and for loops, or after the
197 loop body as in do-while loops. */
200 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
201 tree incr
, bool cond_is_first
, int *walk_subtrees
,
206 tree stmt_list
= NULL
;
208 blab
= begin_bc_block (bc_break
, start_locus
);
209 clab
= begin_bc_block (bc_continue
, start_locus
);
211 protected_set_expr_location (incr
, start_locus
);
213 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
214 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
215 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
218 if (cond
&& TREE_CODE (cond
) != INTEGER_CST
)
220 /* If COND is constant, don't bother building an exit. If it's false,
221 we won't build a loop. If it's true, any exits are in the body. */
222 location_t cloc
= EXPR_LOC_OR_LOC (cond
, start_locus
);
223 exit
= build1_loc (cloc
, GOTO_EXPR
, void_type_node
,
224 get_bc_label (bc_break
));
225 exit
= fold_build3_loc (cloc
, COND_EXPR
, void_type_node
, cond
,
226 build_empty_stmt (cloc
), exit
);
229 if (exit
&& cond_is_first
)
230 append_to_statement_list (exit
, &stmt_list
);
231 append_to_statement_list (body
, &stmt_list
);
232 finish_bc_block (&stmt_list
, bc_continue
, clab
);
233 append_to_statement_list (incr
, &stmt_list
);
234 if (exit
&& !cond_is_first
)
235 append_to_statement_list (exit
, &stmt_list
);
238 stmt_list
= build_empty_stmt (start_locus
);
241 if (cond
&& integer_zerop (cond
))
244 loop
= fold_build3_loc (start_locus
, COND_EXPR
,
245 void_type_node
, cond
, stmt_list
,
246 build_empty_stmt (start_locus
));
252 location_t loc
= start_locus
;
253 if (!cond
|| integer_nonzerop (cond
))
254 loc
= EXPR_LOCATION (expr_first (body
));
255 if (loc
== UNKNOWN_LOCATION
)
257 loop
= build1_loc (loc
, LOOP_EXPR
, void_type_node
, stmt_list
);
261 append_to_statement_list (loop
, &stmt_list
);
262 finish_bc_block (&stmt_list
, bc_break
, blab
);
264 stmt_list
= build_empty_stmt (start_locus
);
269 /* Genericize a FOR_STMT node *STMT_P. */
272 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
277 tree init
= FOR_INIT_STMT (stmt
);
281 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
282 append_to_statement_list (init
, &expr
);
285 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
286 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
287 append_to_statement_list (loop
, &expr
);
288 if (expr
== NULL_TREE
)
293 /* Genericize a WHILE_STMT node *STMT_P. */
296 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
299 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
300 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
303 /* Genericize a DO_STMT node *STMT_P. */
306 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
309 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
310 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
316 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
319 tree break_block
, body
, cond
, type
;
320 location_t stmt_locus
= EXPR_LOCATION (stmt
);
322 break_block
= begin_bc_block (bc_break
, stmt_locus
);
324 body
= SWITCH_STMT_BODY (stmt
);
326 body
= build_empty_stmt (stmt_locus
);
327 cond
= SWITCH_STMT_COND (stmt
);
328 type
= SWITCH_STMT_TYPE (stmt
);
330 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
331 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
332 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
335 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
336 finish_bc_block (stmt_p
, bc_break
, break_block
);
339 /* Genericize a CONTINUE_STMT node *STMT_P. */
342 genericize_continue_stmt (tree
*stmt_p
)
344 tree stmt_list
= NULL
;
345 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
346 tree label
= get_bc_label (bc_continue
);
347 location_t location
= EXPR_LOCATION (*stmt_p
);
348 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
349 append_to_statement_list_force (pred
, &stmt_list
);
350 append_to_statement_list (jump
, &stmt_list
);
354 /* Genericize a BREAK_STMT node *STMT_P. */
357 genericize_break_stmt (tree
*stmt_p
)
359 tree label
= get_bc_label (bc_break
);
360 location_t location
= EXPR_LOCATION (*stmt_p
);
361 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
364 /* Genericize a OMP_FOR node *STMT_P. */
367 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
370 location_t locus
= EXPR_LOCATION (stmt
);
371 tree clab
= begin_bc_block (bc_continue
, locus
);
373 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
374 if (TREE_CODE (stmt
) != OMP_TASKLOOP
)
375 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
376 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
377 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
378 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
379 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
382 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
385 /* Hook into the middle of gimplifying an OMP_FOR node. */
387 static enum gimplify_status
388 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
390 tree for_stmt
= *expr_p
;
391 gimple_seq seq
= NULL
;
393 /* Protect ourselves from recursion. */
394 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
396 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
398 gimplify_and_add (for_stmt
, &seq
);
399 gimple_seq_add_seq (pre_p
, seq
);
401 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
406 /* Gimplify an EXPR_STMT node. */
409 gimplify_expr_stmt (tree
*stmt_p
)
411 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
413 if (stmt
== error_mark_node
)
416 /* Gimplification of a statement expression will nullify the
417 statement if all its side effects are moved to *PRE_P and *POST_P.
419 In this case we will not want to emit the gimplified statement.
420 However, we may still want to emit a warning, so we do that before
422 if (stmt
&& warn_unused_value
)
424 if (!TREE_SIDE_EFFECTS (stmt
))
426 if (!IS_EMPTY_STMT (stmt
)
427 && !VOID_TYPE_P (TREE_TYPE (stmt
))
428 && !TREE_NO_WARNING (stmt
))
429 warning (OPT_Wunused_value
, "statement with no effect");
432 warn_if_unused_value (stmt
, input_location
);
435 if (stmt
== NULL_TREE
)
436 stmt
= alloc_stmt_list ();
441 /* Gimplify initialization from an AGGR_INIT_EXPR. */
444 cp_gimplify_init_expr (tree
*expr_p
)
446 tree from
= TREE_OPERAND (*expr_p
, 1);
447 tree to
= TREE_OPERAND (*expr_p
, 0);
450 /* What about code that pulls out the temp and uses it elsewhere? I
451 think that such code never uses the TARGET_EXPR as an initializer. If
452 I'm wrong, we'll abort because the temp won't have any RTL. In that
453 case, I guess we'll need to replace references somehow. */
454 if (TREE_CODE (from
) == TARGET_EXPR
)
455 from
= TARGET_EXPR_INITIAL (from
);
457 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
458 inside the TARGET_EXPR. */
461 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
463 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
464 replace the slot operand with our target.
466 Should we add a target parm to gimplify_expr instead? No, as in this
467 case we want to replace the INIT_EXPR. */
468 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
469 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
471 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
472 AGGR_INIT_EXPR_SLOT (sub
) = to
;
474 VEC_INIT_EXPR_SLOT (sub
) = to
;
477 /* The initialization is now a side-effect, so the container can
480 TREE_TYPE (from
) = void_type_node
;
483 /* Handle aggregate NSDMI. */
484 replace_placeholders (sub
, to
);
489 t
= TREE_OPERAND (t
, 1);
494 /* Gimplify a MUST_NOT_THROW_EXPR. */
496 static enum gimplify_status
497 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
500 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
501 tree body
= TREE_OPERAND (stmt
, 0);
502 gimple_seq try_
= NULL
;
503 gimple_seq catch_
= NULL
;
506 gimplify_and_add (body
, &try_
);
507 mnt
= gimple_build_eh_must_not_throw (terminate_fn
);
508 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
509 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
511 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
522 /* Return TRUE if an operand (OP) of a given TYPE being copied is
523 really just an empty class copy.
525 Check that the operand has a simple form so that TARGET_EXPRs and
526 non-empty CONSTRUCTORs get reduced properly, and we leave the
527 return slot optimization alone because it isn't a copy. */
530 simple_empty_class_p (tree type
, tree op
)
533 ((TREE_CODE (op
) == COMPOUND_EXPR
534 && simple_empty_class_p (type
, TREE_OPERAND (op
, 1)))
535 || TREE_CODE (op
) == EMPTY_CLASS_EXPR
536 || is_gimple_lvalue (op
)
537 || INDIRECT_REF_P (op
)
538 || (TREE_CODE (op
) == CONSTRUCTOR
539 && CONSTRUCTOR_NELTS (op
) == 0
540 && !TREE_CLOBBER_P (op
))
541 || (TREE_CODE (op
) == CALL_EXPR
542 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
543 && is_really_empty_class (type
);
546 /* Returns true if evaluating E as an lvalue has side-effects;
547 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
548 have side-effects until there is a read or write through it. */
551 lvalue_has_side_effects (tree e
)
553 if (!TREE_SIDE_EFFECTS (e
))
555 while (handled_component_p (e
))
557 if (TREE_CODE (e
) == ARRAY_REF
558 && TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 1)))
560 e
= TREE_OPERAND (e
, 0);
563 /* Just naming a variable has no side-effects. */
565 else if (INDIRECT_REF_P (e
))
566 /* Similarly, indirection has no side-effects. */
567 return TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 0));
569 /* For anything else, trust TREE_SIDE_EFFECTS. */
570 return TREE_SIDE_EFFECTS (e
);
573 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
576 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
578 int saved_stmts_are_full_exprs_p
= 0;
579 location_t loc
= EXPR_LOC_OR_LOC (*expr_p
, input_location
);
580 enum tree_code code
= TREE_CODE (*expr_p
);
581 enum gimplify_status ret
;
583 if (STATEMENT_CODE_P (code
))
585 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
586 current_stmt_tree ()->stmts_are_full_exprs_p
587 = STMT_IS_FULL_EXPR_P (*expr_p
);
593 simplify_aggr_init_expr (expr_p
);
599 location_t loc
= input_location
;
600 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
601 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
602 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
603 input_location
= EXPR_LOCATION (*expr_p
);
604 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
605 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
607 tf_warning_or_error
);
609 cp_walk_tree (expr_p
, cp_fold_r
, &pset
, NULL
);
610 cp_genericize_tree (expr_p
, false);
612 input_location
= loc
;
617 /* FIXME communicate throw type to back end, probably by moving
618 THROW_EXPR into ../tree.def. */
619 *expr_p
= TREE_OPERAND (*expr_p
, 0);
623 case MUST_NOT_THROW_EXPR
:
624 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
627 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
628 LHS of an assignment might also be involved in the RHS, as in bug
631 if (fn_contains_cilk_spawn_p (cfun
))
633 if (cilk_cp_detect_spawn_and_unwrap (expr_p
))
634 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
635 if (seen_error () && contains_cilk_spawn_stmt (*expr_p
))
639 cp_gimplify_init_expr (expr_p
);
640 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
646 if (fn_contains_cilk_spawn_p (cfun
)
647 && cilk_cp_detect_spawn_and_unwrap (expr_p
)
649 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
650 /* If the back end isn't clever enough to know that the lhs and rhs
651 types are the same, add an explicit conversion. */
652 tree op0
= TREE_OPERAND (*expr_p
, 0);
653 tree op1
= TREE_OPERAND (*expr_p
, 1);
655 if (!error_operand_p (op0
)
656 && !error_operand_p (op1
)
657 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
658 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
659 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
660 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
661 TREE_TYPE (op0
), op1
);
663 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
))
665 /* Remove any copies of empty classes. Also drop volatile
666 variables on the RHS to avoid infinite recursion from
667 gimplify_expr trying to load the value. */
668 if (TREE_SIDE_EFFECTS (op1
))
670 if (TREE_THIS_VOLATILE (op1
)
671 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
672 op1
= build_fold_addr_expr (op1
);
674 gimplify_and_add (op1
, pre_p
);
676 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
677 is_gimple_lvalue
, fb_lvalue
);
678 *expr_p
= TREE_OPERAND (*expr_p
, 0);
680 /* P0145 says that the RHS is sequenced before the LHS.
681 gimplify_modify_expr gimplifies the RHS before the LHS, but that
682 isn't quite strong enough in two cases:
684 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
685 mean it's evaluated after the LHS.
687 2) the value calculation of the RHS is also sequenced before the
688 LHS, so for scalar assignment we need to preevaluate if the
689 RHS could be affected by LHS side-effects even if it has no
690 side-effects of its own. We don't need this for classes because
691 class assignment takes its RHS by reference. */
692 else if (flag_strong_eval_order
> 1
693 && TREE_CODE (*expr_p
) == MODIFY_EXPR
694 && lvalue_has_side_effects (op0
)
695 && (TREE_CODE (op1
) == CALL_EXPR
696 || (SCALAR_TYPE_P (TREE_TYPE (op1
))
697 && !TREE_CONSTANT (op1
))))
698 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (op1
, pre_p
);
703 case EMPTY_CLASS_EXPR
:
704 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
705 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
710 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
715 genericize_try_block (expr_p
);
720 genericize_catch_block (expr_p
);
725 genericize_eh_spec_block (expr_p
);
744 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
748 gimplify_expr_stmt (expr_p
);
752 case UNARY_PLUS_EXPR
:
754 tree arg
= TREE_OPERAND (*expr_p
, 0);
755 tree type
= TREE_TYPE (*expr_p
);
756 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
762 case CILK_SPAWN_STMT
:
763 gcc_assert(fn_contains_cilk_spawn_p (cfun
)
764 && cilk_cp_detect_spawn_and_unwrap (expr_p
));
767 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
771 if (fn_contains_cilk_spawn_p (cfun
)
772 && cilk_cp_detect_spawn_and_unwrap (expr_p
)
774 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
776 if (!CALL_EXPR_FN (*expr_p
))
777 /* Internal function call. */;
778 else if (CALL_EXPR_REVERSE_ARGS (*expr_p
))
780 /* This is a call to a (compound) assignment operator that used
781 the operator syntax; gimplify the RHS first. */
782 gcc_assert (call_expr_nargs (*expr_p
) == 2);
783 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p
));
784 enum gimplify_status t
785 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 1), pre_p
, loc
);
789 else if (CALL_EXPR_ORDERED_ARGS (*expr_p
))
791 /* Leave the last argument for gimplify_call_expr, to avoid problems
792 with __builtin_va_arg_pack(). */
793 int nargs
= call_expr_nargs (*expr_p
) - 1;
794 for (int i
= 0; i
< nargs
; ++i
)
796 enum gimplify_status t
797 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
);
802 else if (flag_strong_eval_order
803 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
))
805 /* If flag_strong_eval_order, evaluate the object argument first. */
806 tree fntype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
807 if (POINTER_TYPE_P (fntype
))
808 fntype
= TREE_TYPE (fntype
);
809 if (TREE_CODE (fntype
) == METHOD_TYPE
)
811 enum gimplify_status t
812 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 0), pre_p
, loc
);
820 if (TREE_OPERAND (*expr_p
, 0)
821 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
822 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
824 expr_p
= &TREE_OPERAND (*expr_p
, 0);
825 code
= TREE_CODE (*expr_p
);
826 /* Avoid going through the INIT_EXPR case, which can
827 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
828 goto modify_expr_case
;
833 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
837 /* Restore saved state. */
838 if (STATEMENT_CODE_P (code
))
839 current_stmt_tree ()->stmts_are_full_exprs_p
840 = saved_stmts_are_full_exprs_p
;
846 is_invisiref_parm (const_tree t
)
848 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
849 && DECL_BY_REFERENCE (t
));
852 /* Return true if the uid in both int tree maps are equal. */
855 cxx_int_tree_map_hasher::equal (cxx_int_tree_map
*a
, cxx_int_tree_map
*b
)
857 return (a
->uid
== b
->uid
);
860 /* Hash a UID in a cxx_int_tree_map. */
863 cxx_int_tree_map_hasher::hash (cxx_int_tree_map
*item
)
868 /* A stable comparison routine for use with splay trees and DECLs. */
871 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
876 return DECL_UID (a
) - DECL_UID (b
);
879 /* OpenMP context during genericization. */
881 struct cp_genericize_omp_taskreg
885 struct cp_genericize_omp_taskreg
*outer
;
886 splay_tree variables
;
889 /* Return true if genericization should try to determine if
890 DECL is firstprivate or shared within task regions. */
893 omp_var_to_track (tree decl
)
895 tree type
= TREE_TYPE (decl
);
896 if (is_invisiref_parm (decl
))
897 type
= TREE_TYPE (type
);
898 while (TREE_CODE (type
) == ARRAY_TYPE
)
899 type
= TREE_TYPE (type
);
900 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
902 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
904 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
909 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
912 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
914 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
915 (splay_tree_key
) decl
);
918 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
920 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
921 if (!omp_ctx
->default_shared
)
923 struct cp_genericize_omp_taskreg
*octx
;
925 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
927 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
928 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
930 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
933 if (octx
->is_parallel
)
937 && (TREE_CODE (decl
) == PARM_DECL
938 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
939 && DECL_CONTEXT (decl
) == current_function_decl
)))
940 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
941 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
943 /* DECL is implicitly determined firstprivate in
944 the current task construct. Ensure copy ctor and
945 dtor are instantiated, because during gimplification
946 it will be already too late. */
947 tree type
= TREE_TYPE (decl
);
948 if (is_invisiref_parm (decl
))
949 type
= TREE_TYPE (type
);
950 while (TREE_CODE (type
) == ARRAY_TYPE
)
951 type
= TREE_TYPE (type
);
952 get_copy_ctor (type
, tf_none
);
953 get_dtor (type
, tf_none
);
956 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
960 /* Genericization context. */
962 struct cp_genericize_data
964 hash_set
<tree
> *p_set
;
965 vec
<tree
> bind_expr_stack
;
966 struct cp_genericize_omp_taskreg
*omp_ctx
;
969 bool handle_invisiref_parm_p
;
972 /* Perform any pre-gimplification folding of C++ front end trees to
974 Note: The folding of none-omp cases is something to move into
975 the middle-end. As for now we have most foldings only on GENERIC
976 in fold-const, we need to perform this before transformation to
980 cp_fold_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
985 *stmt_p
= stmt
= cp_fold (*stmt_p
);
987 if (((hash_set
<tree
> *) data
)->add (stmt
))
989 /* Don't walk subtrees of stmts we've already walked once, otherwise
990 we can have exponential complexity with e.g. lots of nested
991 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
992 always the same tree, which the first time cp_fold_r has been
993 called on it had the subtrees walked. */
998 code
= TREE_CODE (stmt
);
999 if (code
== OMP_FOR
|| code
== OMP_SIMD
|| code
== OMP_DISTRIBUTE
1000 || code
== OMP_TASKLOOP
|| code
== CILK_FOR
|| code
== CILK_SIMD
1001 || code
== OACC_LOOP
)
1006 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_fold_r
, data
, NULL
);
1007 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_fold_r
, data
, NULL
);
1008 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_fold_r
, data
, NULL
);
1009 x
= OMP_FOR_COND (stmt
);
1010 if (x
&& TREE_CODE_CLASS (TREE_CODE (x
)) == tcc_comparison
)
1012 cp_walk_tree (&TREE_OPERAND (x
, 0), cp_fold_r
, data
, NULL
);
1013 cp_walk_tree (&TREE_OPERAND (x
, 1), cp_fold_r
, data
, NULL
);
1015 else if (x
&& TREE_CODE (x
) == TREE_VEC
)
1017 n
= TREE_VEC_LENGTH (x
);
1018 for (i
= 0; i
< n
; i
++)
1020 tree o
= TREE_VEC_ELT (x
, i
);
1021 if (o
&& TREE_CODE_CLASS (TREE_CODE (o
)) == tcc_comparison
)
1022 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1025 x
= OMP_FOR_INCR (stmt
);
1026 if (x
&& TREE_CODE (x
) == TREE_VEC
)
1028 n
= TREE_VEC_LENGTH (x
);
1029 for (i
= 0; i
< n
; i
++)
1031 tree o
= TREE_VEC_ELT (x
, i
);
1032 if (o
&& TREE_CODE (o
) == MODIFY_EXPR
)
1033 o
= TREE_OPERAND (o
, 1);
1034 if (o
&& (TREE_CODE (o
) == PLUS_EXPR
|| TREE_CODE (o
) == MINUS_EXPR
1035 || TREE_CODE (o
) == POINTER_PLUS_EXPR
))
1037 cp_walk_tree (&TREE_OPERAND (o
, 0), cp_fold_r
, data
, NULL
);
1038 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1042 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_fold_r
, data
, NULL
);
1049 /* Fold ALL the trees! FIXME we should be able to remove this, but
1050 apparently that still causes optimization regressions. */
1053 cp_fold_function (tree fndecl
)
1055 hash_set
<tree
> pset
;
1056 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_fold_r
, &pset
, NULL
);
1059 /* Perform any pre-gimplification lowering of C++ front end trees to
1063 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1065 tree stmt
= *stmt_p
;
1066 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1067 hash_set
<tree
> *p_set
= wtd
->p_set
;
1069 /* If in an OpenMP context, note var uses. */
1070 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
1072 || TREE_CODE (stmt
) == PARM_DECL
1073 || TREE_CODE (stmt
) == RESULT_DECL
)
1074 && omp_var_to_track (stmt
))
1075 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1077 /* Dereference invisible reference parms. */
1078 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1080 *stmt_p
= convert_from_reference (stmt
);
1081 p_set
->add (*stmt_p
);
1086 /* Map block scope extern declarations to visible declarations with the
1087 same name and type in outer scopes if any. */
1088 if (cp_function_chain
->extern_decl_map
1089 && VAR_OR_FUNCTION_DECL_P (stmt
)
1090 && DECL_EXTERNAL (stmt
))
1092 struct cxx_int_tree_map
*h
, in
;
1093 in
.uid
= DECL_UID (stmt
);
1094 h
= cp_function_chain
->extern_decl_map
->find_with_hash (&in
, in
.uid
);
1103 if (TREE_CODE (stmt
) == INTEGER_CST
1104 && TREE_CODE (TREE_TYPE (stmt
)) == REFERENCE_TYPE
1105 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1106 && !wtd
->no_sanitize_p
)
1108 ubsan_maybe_instrument_reference (stmt_p
);
1109 if (*stmt_p
!= stmt
)
1116 /* Other than invisiref parms, don't walk the same tree twice. */
1117 if (p_set
->contains (stmt
))
1123 switch (TREE_CODE (stmt
))
1126 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1128 /* If in an OpenMP context, note var uses. */
1129 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
1130 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1131 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1132 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1138 if (TREE_OPERAND (stmt
, 0) && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1139 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1144 switch (OMP_CLAUSE_CODE (stmt
))
1146 case OMP_CLAUSE_LASTPRIVATE
:
1147 /* Don't dereference an invisiref in OpenMP clauses. */
1148 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1151 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1152 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1153 cp_genericize_r
, data
, NULL
);
1156 case OMP_CLAUSE_PRIVATE
:
1157 /* Don't dereference an invisiref in OpenMP clauses. */
1158 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1160 else if (wtd
->omp_ctx
!= NULL
)
1162 /* Private clause doesn't cause any references to the
1163 var in outer contexts, avoid calling
1164 omp_cxx_notice_variable for it. */
1165 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1166 wtd
->omp_ctx
= NULL
;
1167 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1173 case OMP_CLAUSE_SHARED
:
1174 case OMP_CLAUSE_FIRSTPRIVATE
:
1175 case OMP_CLAUSE_COPYIN
:
1176 case OMP_CLAUSE_COPYPRIVATE
:
1177 /* Don't dereference an invisiref in OpenMP clauses. */
1178 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1181 case OMP_CLAUSE_REDUCTION
:
1182 /* Don't dereference an invisiref in reduction clause's
1183 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1184 still needs to be genericized. */
1185 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1188 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1189 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1190 cp_genericize_r
, data
, NULL
);
1191 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1192 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1193 cp_genericize_r
, data
, NULL
);
1201 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1202 to lower this construct before scanning it, so we need to lower these
1203 before doing anything else. */
1205 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1206 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1209 CLEANUP_BODY (stmt
),
1210 CLEANUP_EXPR (stmt
));
1214 genericize_if_stmt (stmt_p
);
1215 /* *stmt_p has changed, tail recurse to handle it again. */
1216 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1218 /* COND_EXPR might have incompatible types in branches if one or both
1219 arms are bitfields. Fix it up now. */
1223 = (TREE_OPERAND (stmt
, 1)
1224 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1227 = (TREE_OPERAND (stmt
, 2)
1228 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1231 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1232 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1234 TREE_OPERAND (stmt
, 1)
1235 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1236 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1240 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1241 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1243 TREE_OPERAND (stmt
, 2)
1244 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1245 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1252 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1255 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1257 && !DECL_EXTERNAL (decl
)
1258 && omp_var_to_track (decl
))
1261 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1262 (splay_tree_key
) decl
);
1264 splay_tree_insert (wtd
->omp_ctx
->variables
,
1265 (splay_tree_key
) decl
,
1267 ? OMP_CLAUSE_DEFAULT_SHARED
1268 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1271 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1273 /* The point here is to not sanitize static initializers. */
1274 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1275 wtd
->no_sanitize_p
= true;
1276 for (tree decl
= BIND_EXPR_VARS (stmt
);
1278 decl
= DECL_CHAIN (decl
))
1280 && TREE_STATIC (decl
)
1281 && DECL_INITIAL (decl
))
1282 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1283 wtd
->no_sanitize_p
= no_sanitize_p
;
1285 wtd
->bind_expr_stack
.safe_push (stmt
);
1286 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1287 cp_genericize_r
, data
, NULL
);
1288 wtd
->bind_expr_stack
.pop ();
1293 tree block
= NULL_TREE
;
1295 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1296 BLOCK, and append an IMPORTED_DECL to its
1297 BLOCK_VARS chained list. */
1298 if (wtd
->bind_expr_stack
.exists ())
1301 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1302 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1307 tree using_directive
;
1308 gcc_assert (TREE_OPERAND (stmt
, 0));
1310 using_directive
= make_node (IMPORTED_DECL
);
1311 TREE_TYPE (using_directive
) = void_type_node
;
1313 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1314 = TREE_OPERAND (stmt
, 0);
1315 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1316 BLOCK_VARS (block
) = using_directive
;
1318 /* The USING_STMT won't appear in GENERIC. */
1319 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1325 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1327 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1328 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1333 tree d
= DECL_EXPR_DECL (stmt
);
1335 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1343 struct cp_genericize_omp_taskreg omp_ctx
;
1348 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1349 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1350 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1351 omp_ctx
.outer
= wtd
->omp_ctx
;
1352 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1353 wtd
->omp_ctx
= &omp_ctx
;
1354 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1355 switch (OMP_CLAUSE_CODE (c
))
1357 case OMP_CLAUSE_SHARED
:
1358 case OMP_CLAUSE_PRIVATE
:
1359 case OMP_CLAUSE_FIRSTPRIVATE
:
1360 case OMP_CLAUSE_LASTPRIVATE
:
1361 decl
= OMP_CLAUSE_DECL (c
);
1362 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1364 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1367 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1368 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1369 ? OMP_CLAUSE_DEFAULT_SHARED
1370 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1371 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1372 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1374 case OMP_CLAUSE_DEFAULT
:
1375 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1376 omp_ctx
.default_shared
= true;
1380 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1381 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1383 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1384 wtd
->omp_ctx
= omp_ctx
.outer
;
1385 splay_tree_delete (omp_ctx
.variables
);
1392 tree try_block
= wtd
->try_block
;
1393 wtd
->try_block
= stmt
;
1394 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1395 wtd
->try_block
= try_block
;
1396 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1400 case MUST_NOT_THROW_EXPR
:
1401 /* MUST_NOT_THROW_COND might be something else with TM. */
1402 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1405 tree try_block
= wtd
->try_block
;
1406 wtd
->try_block
= stmt
;
1407 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1408 wtd
->try_block
= try_block
;
1414 location_t loc
= location_of (stmt
);
1415 if (TREE_NO_WARNING (stmt
))
1417 else if (wtd
->try_block
)
1419 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
1420 && warning_at (loc
, OPT_Wterminate
,
1421 "throw will always call terminate()")
1422 && cxx_dialect
>= cxx11
1423 && DECL_DESTRUCTOR_P (current_function_decl
))
1424 inform (loc
, "in C++11 destructors default to noexcept");
1428 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
1429 && DECL_DESTRUCTOR_P (current_function_decl
)
1430 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
1432 && (get_defaulted_eh_spec (current_function_decl
)
1433 == empty_except_spec
))
1434 warning_at (loc
, OPT_Wc__11_compat
,
1435 "in C++11 this throw will terminate because "
1436 "destructors default to noexcept");
1442 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1446 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1450 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1454 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1458 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1462 genericize_continue_stmt (stmt_p
);
1466 genericize_break_stmt (stmt_p
);
1471 case OMP_DISTRIBUTE
:
1472 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1476 /* By the time we get here we're handing off to the back end, so we don't
1477 need or want to preserve PTRMEM_CST anymore. */
1478 *stmt_p
= cplus_expand_constant (stmt
);
1483 /* For MEM_REF, make sure not to sanitize the second operand even
1484 if it has reference type. It is just an offset with a type
1485 holding other information. There is no other processing we
1486 need to do for INTEGER_CSTs, so just ignore the second argument
1488 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1493 if (!wtd
->no_sanitize_p
1494 && sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
)
1495 && TREE_CODE (TREE_TYPE (stmt
)) == REFERENCE_TYPE
)
1496 ubsan_maybe_instrument_reference (stmt_p
);
1500 if (!wtd
->no_sanitize_p
1501 && sanitize_flags_p ((SANITIZE_NULL
1502 | SANITIZE_ALIGNMENT
| SANITIZE_VPTR
)))
1504 tree fn
= CALL_EXPR_FN (stmt
);
1506 && !error_operand_p (fn
)
1507 && POINTER_TYPE_P (TREE_TYPE (fn
))
1508 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1511 = TREE_CODE (fn
) == ADDR_EXPR
1512 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1513 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1514 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1515 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1516 if (sanitize_flags_p (SANITIZE_VPTR
) && !is_ctor
)
1517 cp_ubsan_maybe_instrument_member_call (stmt
);
1523 if (IS_TYPE_OR_DECL_P (stmt
))
1528 p_set
->add (*stmt_p
);
1533 /* Lower C++ front end trees to GENERIC in T_P. */
1536 cp_genericize_tree (tree
* t_p
, bool handle_invisiref_parm_p
)
1538 struct cp_genericize_data wtd
;
1540 wtd
.p_set
= new hash_set
<tree
>;
1541 wtd
.bind_expr_stack
.create (0);
1543 wtd
.try_block
= NULL_TREE
;
1544 wtd
.no_sanitize_p
= false;
1545 wtd
.handle_invisiref_parm_p
= handle_invisiref_parm_p
;
1546 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1548 wtd
.bind_expr_stack
.release ();
1549 if (sanitize_flags_p (SANITIZE_VPTR
))
1550 cp_ubsan_instrument_member_accesses (t_p
);
1553 /* If a function that should end with a return in non-void
1554 function doesn't obviously end with return, add ubsan
1555 instrumentation code to verify it at runtime. */
1558 cp_ubsan_maybe_instrument_return (tree fndecl
)
1560 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1561 || DECL_CONSTRUCTOR_P (fndecl
)
1562 || DECL_DESTRUCTOR_P (fndecl
)
1563 || !targetm
.warn_func_return (fndecl
))
1566 tree t
= DECL_SAVED_TREE (fndecl
);
1569 switch (TREE_CODE (t
))
1572 t
= BIND_EXPR_BODY (t
);
1574 case TRY_FINALLY_EXPR
:
1575 t
= TREE_OPERAND (t
, 0);
1577 case STATEMENT_LIST
:
1579 tree_stmt_iterator i
= tsi_last (t
);
1596 tree
*p
= &DECL_SAVED_TREE (fndecl
);
1597 if (TREE_CODE (*p
) == BIND_EXPR
)
1598 p
= &BIND_EXPR_BODY (*p
);
1599 t
= ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl
));
1600 append_to_statement_list (t
, p
);
1604 cp_genericize (tree fndecl
)
1608 /* Fix up the types of parms passed by invisible reference. */
1609 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1610 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1612 /* If a function's arguments are copied to create a thunk,
1613 then DECL_BY_REFERENCE will be set -- but the type of the
1614 argument will be a pointer type, so we will never get
1616 gcc_assert (!DECL_BY_REFERENCE (t
));
1617 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
1618 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1619 DECL_BY_REFERENCE (t
) = 1;
1620 TREE_ADDRESSABLE (t
) = 0;
1624 /* Do the same for the return value. */
1625 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1627 t
= DECL_RESULT (fndecl
);
1628 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1629 DECL_BY_REFERENCE (t
) = 1;
1630 TREE_ADDRESSABLE (t
) = 0;
1634 /* Adjust DECL_VALUE_EXPR of the original var. */
1635 tree outer
= outer_curly_brace_block (current_function_decl
);
1639 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1641 && DECL_NAME (t
) == DECL_NAME (var
)
1642 && DECL_HAS_VALUE_EXPR_P (var
)
1643 && DECL_VALUE_EXPR (var
) == t
)
1645 tree val
= convert_from_reference (t
);
1646 SET_DECL_VALUE_EXPR (var
, val
);
1652 /* If we're a clone, the body is already GIMPLE. */
1653 if (DECL_CLONED_FUNCTION_P (fndecl
))
1656 /* Allow cp_genericize calls to be nested. */
1657 tree save_bc_label
[2];
1658 save_bc_label
[bc_break
] = bc_label
[bc_break
];
1659 save_bc_label
[bc_continue
] = bc_label
[bc_continue
];
1660 bc_label
[bc_break
] = NULL_TREE
;
1661 bc_label
[bc_continue
] = NULL_TREE
;
1663 /* Expand all the array notations here. */
1665 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl
)))
1666 DECL_SAVED_TREE (fndecl
)
1667 = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl
));
1669 /* We do want to see every occurrence of the parms, so we can't just use
1670 walk_tree's hash functionality. */
1671 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
), true);
1673 if (sanitize_flags_p (SANITIZE_RETURN
)
1674 && current_function_decl
!= NULL_TREE
)
1675 cp_ubsan_maybe_instrument_return (fndecl
);
1677 /* Do everything else. */
1678 c_genericize (fndecl
);
1680 gcc_assert (bc_label
[bc_break
] == NULL
);
1681 gcc_assert (bc_label
[bc_continue
] == NULL
);
1682 bc_label
[bc_break
] = save_bc_label
[bc_break
];
1683 bc_label
[bc_continue
] = save_bc_label
[bc_continue
];
1686 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1687 NULL if there is in fact nothing to do. ARG2 may be null if FN
1688 actually only takes one argument. */
1691 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1693 tree defparm
, parm
, t
;
1701 nargs
= list_length (DECL_ARGUMENTS (fn
));
1702 argarray
= XALLOCAVEC (tree
, nargs
);
1704 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1706 defparm
= TREE_CHAIN (defparm
);
1708 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
1710 tree inner_type
= TREE_TYPE (arg1
);
1711 tree start1
, end1
, p1
;
1712 tree start2
= NULL
, p2
= NULL
;
1713 tree ret
= NULL
, lab
;
1719 inner_type
= TREE_TYPE (inner_type
);
1720 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
1721 size_zero_node
, NULL
, NULL
);
1723 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
1724 size_zero_node
, NULL
, NULL
);
1726 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
1727 start1
= build_fold_addr_expr_loc (input_location
, start1
);
1729 start2
= build_fold_addr_expr_loc (input_location
, start2
);
1731 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
1732 end1
= fold_build_pointer_plus (start1
, end1
);
1734 p1
= create_tmp_var (TREE_TYPE (start1
));
1735 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
1736 append_to_statement_list (t
, &ret
);
1740 p2
= create_tmp_var (TREE_TYPE (start2
));
1741 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
1742 append_to_statement_list (t
, &ret
);
1745 lab
= create_artificial_label (input_location
);
1746 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1747 append_to_statement_list (t
, &ret
);
1752 /* Handle default arguments. */
1753 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1754 parm
= TREE_CHAIN (parm
), i
++)
1755 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1756 TREE_PURPOSE (parm
), fn
, i
,
1757 tf_warning_or_error
);
1758 t
= build_call_a (fn
, i
, argarray
);
1759 t
= fold_convert (void_type_node
, t
);
1760 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1761 append_to_statement_list (t
, &ret
);
1763 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
1764 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1765 append_to_statement_list (t
, &ret
);
1769 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
1770 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1771 append_to_statement_list (t
, &ret
);
1774 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1775 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1776 append_to_statement_list (t
, &ret
);
1782 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1784 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1785 /* Handle default arguments. */
1786 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1787 parm
= TREE_CHAIN (parm
), i
++)
1788 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1789 TREE_PURPOSE (parm
),
1790 fn
, i
, tf_warning_or_error
);
1791 t
= build_call_a (fn
, i
, argarray
);
1792 t
= fold_convert (void_type_node
, t
);
1793 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1797 /* Return code to initialize DECL with its default constructor, or
1798 NULL if there's nothing to do. */
1801 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
1803 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1807 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1812 /* Return code to initialize DST with a copy constructor from SRC. */
1815 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1817 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1821 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1823 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1828 /* Similarly, except use an assignment operator instead. */
1831 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1833 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1837 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1839 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1844 /* Return code to destroy DECL. */
1847 cxx_omp_clause_dtor (tree clause
, tree decl
)
1849 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1853 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1858 /* True if OpenMP should privatize what this DECL points to rather
1859 than the DECL itself. */
1862 cxx_omp_privatize_by_reference (const_tree decl
)
1864 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1865 || is_invisiref_parm (decl
));
1868 /* Return true if DECL is const qualified var having no mutable member. */
1870 cxx_omp_const_qual_no_mutable (tree decl
)
1872 tree type
= TREE_TYPE (decl
);
1873 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1875 if (!is_invisiref_parm (decl
))
1877 type
= TREE_TYPE (type
);
1879 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1881 /* NVR doesn't preserve const qualification of the
1883 tree outer
= outer_curly_brace_block (current_function_decl
);
1887 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1889 && DECL_NAME (decl
) == DECL_NAME (var
)
1890 && (TYPE_MAIN_VARIANT (type
)
1891 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1893 if (TYPE_READONLY (TREE_TYPE (var
)))
1894 type
= TREE_TYPE (var
);
1900 if (type
== error_mark_node
)
1903 /* Variables with const-qualified type having no mutable member
1904 are predetermined shared. */
1905 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1911 /* True if OpenMP sharing attribute of DECL is predetermined. */
1913 enum omp_clause_default_kind
1914 cxx_omp_predetermined_sharing (tree decl
)
1916 /* Static data members are predetermined shared. */
1917 if (TREE_STATIC (decl
))
1919 tree ctx
= CP_DECL_CONTEXT (decl
);
1920 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1921 return OMP_CLAUSE_DEFAULT_SHARED
;
1924 /* Const qualified vars having no mutable member are predetermined
1926 if (cxx_omp_const_qual_no_mutable (decl
))
1927 return OMP_CLAUSE_DEFAULT_SHARED
;
1929 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1932 /* Finalize an implicitly determined clause. */
1935 cxx_omp_finish_clause (tree c
, gimple_seq
*)
1937 tree decl
, inner_type
;
1938 bool make_shared
= false;
1940 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1943 decl
= OMP_CLAUSE_DECL (c
);
1944 decl
= require_complete_type (decl
);
1945 inner_type
= TREE_TYPE (decl
);
1946 if (decl
== error_mark_node
)
1948 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1949 inner_type
= TREE_TYPE (inner_type
);
1951 /* We're interested in the base element, not arrays. */
1952 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1953 inner_type
= TREE_TYPE (inner_type
);
1955 /* Check for special function availability by building a call to one.
1956 Save the results, because later we won't be in the right context
1957 for making these queries. */
1959 && CLASS_TYPE_P (inner_type
)
1960 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
1965 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
1966 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
) = 0;
1967 OMP_CLAUSE_SHARED_READONLY (c
) = 0;
1971 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1972 disregarded in OpenMP construct, because it is going to be
1973 remapped during OpenMP lowering. SHARED is true if DECL
1974 is going to be shared, false if it is going to be privatized. */
1977 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
1981 && DECL_HAS_VALUE_EXPR_P (decl
)
1982 && DECL_ARTIFICIAL (decl
)
1983 && DECL_LANG_SPECIFIC (decl
)
1984 && DECL_OMP_PRIVATIZED_MEMBER (decl
);
1987 /* Fold expression X which is used as an rvalue if RVAL is true. */
1990 cp_fold_maybe_rvalue (tree x
, bool rval
)
1995 if (rval
&& DECL_P (x
)
1996 && TREE_CODE (TREE_TYPE (x
)) != REFERENCE_TYPE
)
1998 tree v
= decl_constant_value (x
);
1999 if (v
!= x
&& v
!= error_mark_node
)
2010 /* Fold expression X which is used as an rvalue. */
2013 cp_fold_rvalue (tree x
)
2015 return cp_fold_maybe_rvalue (x
, true);
2018 /* Perform folding on expression X. */
2021 cp_fully_fold (tree x
)
2023 if (processing_template_decl
)
2025 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2026 have to call both. */
2027 if (cxx_dialect
>= cxx11
)
2028 x
= maybe_constant_value (x
);
2029 return cp_fold_rvalue (x
);
2032 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2033 and certain changes are made to the folding done. Or should be (FIXME). We
2034 never touch maybe_const, as it is only used for the C front-end
2035 C_MAYBE_CONST_EXPR. */
2038 c_fully_fold (tree x
, bool /*in_init*/, bool */
*maybe_const*/
)
2040 /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
2042 return cp_fold_rvalue (x
);
2045 static GTY((deletable
)) hash_map
<tree
, tree
> *fold_cache
;
2047 /* Dispose of the whole FOLD_CACHE. */
2050 clear_fold_cache (void)
2052 if (fold_cache
!= NULL
)
2053 fold_cache
->empty ();
2056 /* This function tries to fold an expression X.
2057 To avoid combinatorial explosion, folding results are kept in fold_cache.
2058 If we are processing a template or X is invalid, we don't fold at all.
2059 For performance reasons we don't cache expressions representing a
2060 declaration or constant.
2061 Function returns X or its folded variant. */
2066 tree op0
, op1
, op2
, op3
;
2067 tree org_x
= x
, r
= NULL_TREE
;
2068 enum tree_code code
;
2070 bool rval_ops
= true;
2072 if (!x
|| x
== error_mark_node
)
2075 if (processing_template_decl
2076 || (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
)))
2079 /* Don't bother to cache DECLs or constants. */
2080 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
2083 if (fold_cache
== NULL
)
2084 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
2086 if (tree
*cached
= fold_cache
->get (x
))
2089 code
= TREE_CODE (x
);
2092 case CLEANUP_POINT_EXPR
:
2093 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2095 r
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2096 if (!TREE_SIDE_EFFECTS (r
))
2101 x
= fold_sizeof_expr (x
);
2104 case VIEW_CONVERT_EXPR
:
2109 case NON_LVALUE_EXPR
:
2111 if (VOID_TYPE_P (TREE_TYPE (x
)))
2114 loc
= EXPR_LOCATION (x
);
2115 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2117 if (code
== CONVERT_EXPR
2118 && SCALAR_TYPE_P (TREE_TYPE (x
))
2119 && op0
!= void_node
)
2120 /* During parsing we used convert_to_*_nofold; re-convert now using the
2121 folding variants, since fold() doesn't do those transformations. */
2122 x
= fold (convert (TREE_TYPE (x
), op0
));
2123 else if (op0
!= TREE_OPERAND (x
, 0))
2125 if (op0
== error_mark_node
)
2126 x
= error_mark_node
;
2128 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2133 /* Conversion of an out-of-range value has implementation-defined
2134 behavior; the language considers it different from arithmetic
2135 overflow, which is undefined. */
2136 if (TREE_CODE (op0
) == INTEGER_CST
2137 && TREE_OVERFLOW_P (x
) && !TREE_OVERFLOW_P (op0
))
2138 TREE_OVERFLOW (x
) = false;
2143 /* We don't need the decltype(auto) obfuscation anymore. */
2144 if (REF_PARENTHESIZED_P (x
))
2146 tree p
= maybe_undo_parenthesized_ref (x
);
2157 case FIX_TRUNC_EXPR
:
2162 case TRUTH_NOT_EXPR
:
2163 case FIXED_CONVERT_EXPR
:
2166 loc
= EXPR_LOCATION (x
);
2167 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2169 if (op0
!= TREE_OPERAND (x
, 0))
2171 if (op0
== error_mark_node
)
2172 x
= error_mark_node
;
2175 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
2176 if (code
== INDIRECT_REF
2177 && (INDIRECT_REF_P (x
) || TREE_CODE (x
) == MEM_REF
))
2179 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2180 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2181 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2188 gcc_assert (TREE_CODE (x
) != COND_EXPR
2189 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x
, 0))));
2192 case UNARY_PLUS_EXPR
:
2193 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2194 if (op0
== error_mark_node
)
2195 x
= error_mark_node
;
2197 x
= fold_convert (TREE_TYPE (x
), op0
);
2200 case POSTDECREMENT_EXPR
:
2201 case POSTINCREMENT_EXPR
:
2203 case PREDECREMENT_EXPR
:
2204 case PREINCREMENT_EXPR
:
2209 case POINTER_PLUS_EXPR
:
2213 case TRUNC_DIV_EXPR
:
2215 case FLOOR_DIV_EXPR
:
2216 case ROUND_DIV_EXPR
:
2217 case TRUNC_MOD_EXPR
:
2219 case ROUND_MOD_EXPR
:
2221 case EXACT_DIV_EXPR
:
2231 case TRUTH_AND_EXPR
:
2232 case TRUTH_ANDIF_EXPR
:
2234 case TRUTH_ORIF_EXPR
:
2235 case TRUTH_XOR_EXPR
:
2236 case LT_EXPR
: case LE_EXPR
:
2237 case GT_EXPR
: case GE_EXPR
:
2238 case EQ_EXPR
: case NE_EXPR
:
2239 case UNORDERED_EXPR
: case ORDERED_EXPR
:
2240 case UNLT_EXPR
: case UNLE_EXPR
:
2241 case UNGT_EXPR
: case UNGE_EXPR
:
2242 case UNEQ_EXPR
: case LTGT_EXPR
:
2243 case RANGE_EXPR
: case COMPLEX_EXPR
:
2245 loc
= EXPR_LOCATION (x
);
2246 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
);
2247 op1
= cp_fold_rvalue (TREE_OPERAND (x
, 1));
2249 if (op0
!= TREE_OPERAND (x
, 0) || op1
!= TREE_OPERAND (x
, 1))
2251 if (op0
== error_mark_node
|| op1
== error_mark_node
)
2252 x
= error_mark_node
;
2254 x
= fold_build2_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
);
2259 if (TREE_NO_WARNING (org_x
)
2260 && warn_nonnull_compare
2261 && COMPARISON_CLASS_P (org_x
))
2263 if (x
== error_mark_node
|| TREE_CODE (x
) == INTEGER_CST
)
2265 else if (COMPARISON_CLASS_P (x
))
2266 TREE_NO_WARNING (x
) = 1;
2267 /* Otherwise give up on optimizing these, let GIMPLE folders
2268 optimize those later on. */
2269 else if (op0
!= TREE_OPERAND (org_x
, 0)
2270 || op1
!= TREE_OPERAND (org_x
, 1))
2272 x
= build2_loc (loc
, code
, TREE_TYPE (org_x
), op0
, op1
);
2273 TREE_NO_WARNING (x
) = 1;
2283 /* Don't bother folding a void condition, since it can't produce a
2284 constant value. Also, some statement-level uses of COND_EXPR leave
2285 one of the branches NULL, so folding would crash. */
2286 if (VOID_TYPE_P (TREE_TYPE (x
)))
2289 loc
= EXPR_LOCATION (x
);
2290 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0));
2291 op1
= cp_fold (TREE_OPERAND (x
, 1));
2292 op2
= cp_fold (TREE_OPERAND (x
, 2));
2294 if (TREE_CODE (TREE_TYPE (x
)) == BOOLEAN_TYPE
)
2296 warning_sentinel
s (warn_int_in_bool_context
);
2297 if (!VOID_TYPE_P (TREE_TYPE (op1
)))
2298 op1
= cp_truthvalue_conversion (op1
);
2299 if (!VOID_TYPE_P (TREE_TYPE (op2
)))
2300 op2
= cp_truthvalue_conversion (op2
);
2303 if (op0
!= TREE_OPERAND (x
, 0)
2304 || op1
!= TREE_OPERAND (x
, 1)
2305 || op2
!= TREE_OPERAND (x
, 2))
2307 if (op0
== error_mark_node
2308 || op1
== error_mark_node
2309 || op2
== error_mark_node
)
2310 x
= error_mark_node
;
2312 x
= fold_build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
2317 /* A COND_EXPR might have incompatible types in branches if one or both
2318 arms are bitfields. If folding exposed such a branch, fix it up. */
2319 if (TREE_CODE (x
) != code
2320 && !useless_type_conversion_p (TREE_TYPE (org_x
), TREE_TYPE (x
)))
2321 x
= fold_convert (TREE_TYPE (org_x
), x
);
2327 int i
, m
, sv
= optimize
, nw
= sv
, changed
= 0;
2328 tree callee
= get_callee_fndecl (x
);
2330 /* Some built-in function calls will be evaluated at compile-time in
2331 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2332 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2333 if (callee
&& DECL_BUILT_IN (callee
) && !optimize
2334 && DECL_IS_BUILTIN_CONSTANT_P (callee
)
2335 && current_function_decl
2336 && DECL_DECLARED_CONSTEXPR_P (current_function_decl
))
2341 m
= call_expr_nargs (x
);
2342 for (i
= 0; i
< m
; i
++)
2344 r
= cp_fold (CALL_EXPR_ARG (x
, i
));
2345 if (r
!= CALL_EXPR_ARG (x
, i
))
2347 if (r
== error_mark_node
)
2349 x
= error_mark_node
;
2354 CALL_EXPR_ARG (x
, i
) = r
;
2356 if (x
== error_mark_node
)
2363 if (TREE_CODE (r
) != CALL_EXPR
)
2371 /* Invoke maybe_constant_value for functions declared
2372 constexpr and not called with AGGR_INIT_EXPRs.
2374 Do constexpr expansion of expressions where the call itself is not
2375 constant, but the call followed by an INDIRECT_REF is. */
2376 if (callee
&& DECL_DECLARED_CONSTEXPR_P (callee
)
2378 r
= maybe_constant_value (x
);
2381 if (TREE_CODE (r
) != CALL_EXPR
)
2383 if (DECL_CONSTRUCTOR_P (callee
))
2385 loc
= EXPR_LOCATION (x
);
2386 tree s
= build_fold_indirect_ref_loc (loc
,
2387 CALL_EXPR_ARG (x
, 0));
2388 r
= build2_loc (loc
, INIT_EXPR
, TREE_TYPE (s
), s
, r
);
2403 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (x
);
2404 vec
<constructor_elt
, va_gc
> *nelts
= NULL
;
2405 FOR_EACH_VEC_SAFE_ELT (elts
, i
, p
)
2407 tree op
= cp_fold (p
->value
);
2410 if (op
== error_mark_node
)
2412 x
= error_mark_node
;
2417 nelts
= elts
->copy ();
2418 (*nelts
)[i
].value
= op
;
2422 x
= build_constructor (TREE_TYPE (x
), nelts
);
2427 bool changed
= false;
2428 vec
<tree
, va_gc
> *vec
= make_tree_vector ();
2429 int i
, n
= TREE_VEC_LENGTH (x
);
2430 vec_safe_reserve (vec
, n
);
2432 for (i
= 0; i
< n
; i
++)
2434 tree op
= cp_fold (TREE_VEC_ELT (x
, i
));
2435 vec
->quick_push (op
);
2436 if (op
!= TREE_VEC_ELT (x
, i
))
2443 for (i
= 0; i
< n
; i
++)
2444 TREE_VEC_ELT (r
, i
) = (*vec
)[i
];
2448 release_tree_vector (vec
);
2454 case ARRAY_RANGE_REF
:
2456 loc
= EXPR_LOCATION (x
);
2457 op0
= cp_fold (TREE_OPERAND (x
, 0));
2458 op1
= cp_fold (TREE_OPERAND (x
, 1));
2459 op2
= cp_fold (TREE_OPERAND (x
, 2));
2460 op3
= cp_fold (TREE_OPERAND (x
, 3));
2462 if (op0
!= TREE_OPERAND (x
, 0)
2463 || op1
!= TREE_OPERAND (x
, 1)
2464 || op2
!= TREE_OPERAND (x
, 2)
2465 || op3
!= TREE_OPERAND (x
, 3))
2467 if (op0
== error_mark_node
2468 || op1
== error_mark_node
2469 || op2
== error_mark_node
2470 || op3
== error_mark_node
)
2471 x
= error_mark_node
;
2474 x
= build4_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
, op3
);
2475 TREE_READONLY (x
) = TREE_READONLY (org_x
);
2476 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
2477 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
2485 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2486 folding, evaluates to an invariant. In that case no need to wrap
2487 this folded tree with a SAVE_EXPR. */
2488 r
= cp_fold (TREE_OPERAND (x
, 0));
2489 if (tree_invariant_p (r
))
2497 fold_cache
->put (org_x
, x
);
2498 /* Prevent that we try to fold an already folded result again. */
2500 fold_cache
->put (x
, x
);
2505 #include "gt-cp-cp-gimplify.h"