/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "basic-block.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "c-family/c-ubsan.h"
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree *);
static tree cp_fold (tree);
45 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
47 /* Stack of labels which are targets for "break" or "continue",
48 linked through TREE_CHAIN. */
49 static tree bc_label
[2];
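
/* begin_bc_block and finish_bc_block below treat these two entries as
   stacks: begin_bc_block pushes a fresh label for the innermost
   breakable or continuable construct, get_bc_label turns a "break" or
   "continue" into a goto to the innermost such label, and
   finish_bc_block pops it, emitting a LABEL_EXPR only if the label
   was actually used.  */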
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
                                            gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
                     is_gimple_reg, fb_rvalue);
}
/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
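
/* Taken together, the two functions above turn

     try { f (); } catch (E &e) { g (); }

   into roughly

     TRY_CATCH_EXPR
       f ();
       CATCH_EXPR <E, g ();>  */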
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
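
/* The resulting tree has the shape

     TRY_CATCH_EXPR
       <body>
       EH_FILTER_EXPR <allowed types> <failure action>

   so if <body> throws a type not in <allowed>, <failure> runs.  */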
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
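
/* For example, "if (x) f (); else g ();" becomes
   COND_EXPR <x, f (), g ()>, while "if (1) f ();" is reduced to just
   "f ();" because the untaken arm has no side effects.  */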
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
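
/* As an illustration, "while (c) body;" is lowered to roughly

     LOOP_EXPR
       COND_EXPR <c, (void) 0, GOTO_EXPR <break_label>>
       body
       LABEL_EXPR <continue_label>
     LABEL_EXPR <break_label>

   whereas a do-while loop places the conditional exit after the
   body.  */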
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}
/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}
/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}
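
/* E.g. "switch (n) { case 0: f (); break; }" becomes roughly a
   SWITCH_EXPR <n> whose body keeps the CASE_LABEL_EXPRs, with the
   "break" rewritten into a goto to the label that finish_bc_block
   appends after the SWITCH_EXPR.  */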
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
/* Genericize a OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
        /* Handle aggregate NSDMI.  */
        replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
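
/* For instance, "S x = S (a);" arrives here roughly as

     INIT_EXPR <x, TARGET_EXPR <tmp, AGGR_INIT_EXPR <S::S, &tmp, a>>>

   and the loop above rewrites the AGGR_INIT_EXPR slot from tmp to x,
   so the constructor builds directly into x and the temporary
   disappears.  */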
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
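
/* The GIMPLE emitted here is a GIMPLE_TRY whose catch sequence is a
   single GIMPLE_EH_MUST_NOT_THROW (terminate): any exception escaping
   the body calls std::terminate instead of propagating.  */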
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
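
/* Example: given "struct E {}; E a, b;", the copy "a = b" moves no
   data, so the MODIFY_EXPR case in cp_gimplify_expr below uses this
   predicate to reduce the copy to evaluating its operands for side
   effects only.  */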
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_detect_spawn_and_unwrap (expr_p))
            {
              cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
                                                          pre_p, post_p);
              return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
            }
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          {
            cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          }

        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert(fn_contains_cilk_spawn_p (cfun)
                 && cilk_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      /* DR 1030 says that we need to evaluate the elements of an
         initializer-list in forward order even when it's used as arguments to
         a constructor.  So if the target wants to evaluate them in reverse
         order and there's more than one argument other than 'this', gimplify
         them in order.  */
      ret = GS_OK;
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
          && call_expr_nargs (*expr_p) > 2)
        {
          int nargs = call_expr_nargs (*expr_p);
          location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
          for (int i = 1; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
/* Return true if T is a PARM_DECL or RESULT_DECL that is passed by
   invisible reference.  */

bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
/* Return true if the uid in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
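
/* For instance, in

     void f () { S s;  #pragma omp task  { use (s); } }

   the task region is not default-shared, so s becomes implicitly
   firstprivate there; the code above instantiates S's copy
   constructor and destructor now, since during gimplification it
   would be too late.  */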
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
};
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, NULL, NULL);
}
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, do dereference invisible reference parms.  */
  if (is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (TREE_CODE (d) == VAR_DECL)
        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL
           || TREE_CODE (stmt) == OMP_TASK
           || TREE_CODE (stmt) == OMP_TASKLOOP)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      if (TREE_CODE (stmt) == OMP_TASKLOOP)
        genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      else
        cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
        /* Never mind.  */;
      else if (wtd->try_block)
        {
          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
              && warning_at (loc, OPT_Wterminate,
                             "throw will always call terminate()")
              && cxx_dialect >= cxx11
              && DECL_DESTRUCTOR_P (current_function_decl))
            inform (loc, "in C++11 destructors default to noexcept");
        }
      else
        {
          if (warn_cxx11_compat && cxx_dialect < cxx11
              && DECL_DESTRUCTOR_P (current_function_decl)
              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                  == NULL_TREE)
              && (get_defaulted_eh_spec (current_function_decl)
                  == empty_except_spec))
            warning_at (loc, OPT_Wc__11_compat,
                        "in C++11 this throw will terminate because "
                        "destructors default to noexcept");
        }
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if ((flag_sanitize
            & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
           && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}
/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
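
/* Example of the parameter fixup above: given

     struct S { S (const S &); };  // nontrivial copy constructor
     void f (S s);

   the type of s is TREE_ADDRESSABLE, so s is rewritten to have
   reference type with DECL_BY_REFERENCE set, and cp_genericize_r
   dereferences each use via convert_from_reference.  */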
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
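
/* For an array operand such as "S a[N]", the GENERIC built above is
   essentially

     p1 = &a[0];  end1 = p1 + sizeof (a);
     lab:  fn (p1, p2);  p1 += sizeof (S);  p2 += sizeof (S);
     if (p1 != end1) goto lab;

   i.e. FN is applied to each element in turn (p2 only if ARG2 is
   given).  */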
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}
/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}
/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  return cp_fold (x);
}
/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  if (rval && DECL_P (x))
    {
      tree v = decl_constant_value (x);
      if (v != error_mark_node)
        x = v;
    }
  return cp_fold (x);
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}
/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}
static GTY((cache, deletable)) cache_map fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  gt_cleare_cache (fold_cache);
}
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (tree cached = fold_cache.get (x))
    return cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these, let GIMPLE folders
             optimize those later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;

    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code)
        if (tree type = is_bitfield_expr_with_lowered_type (x))
          x = fold_convert (type, x);

      break;

    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }

    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        bool changed = false;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        vec_safe_reserve (nelts, vec_safe_length (elts));
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            constructor_elt e = { p->index, op };
            nelts->quick_push (e);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    changed = false;
                    break;
                  }
                changed = true;
              }
          }
        if (changed)
          x = build_constructor (TREE_TYPE (x), nelts);
        else
          vec_free (nelts);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
        }

      x = fold (x);
      break;

    default:
      return org_x;
    }

  fold_cache.put (org_x, x);
  /* Prevent that we try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache.put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"