/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
27 #include "basic-block.h"
30 #include "c-family/c-common.h"
33 #include "stor-layout.h"
34 #include "tree-iterator.h"
35 #include "internal-fn.h"
38 #include "c-family/c-ubsan.h"
41 /* Forward declarations. */
43 static tree
cp_genericize_r (tree
*, int *, void *);
44 static void cp_genericize_tree (tree
*);
46 /* Local declarations. */
48 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
50 /* Stack of labels which are targets for "break" or "continue",
51 linked through TREE_CHAIN. */
52 static tree bc_label
[2];
54 /* Begin a scope which can be exited by a break or continue statement. BC
57 Just creates a label with location LOCATION and pushes it into the current
61 begin_bc_block (enum bc_t bc
, location_t location
)
63 tree label
= create_artificial_label (location
);
64 DECL_CHAIN (label
) = bc_label
[bc
];
67 LABEL_DECL_BREAK (label
) = true;
69 LABEL_DECL_CONTINUE (label
) = true;
73 /* Finish a scope which can be exited by a break or continue statement.
74 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
75 an expression for the contents of the scope.
77 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
78 BLOCK. Otherwise, just forget the label. */
81 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
83 gcc_assert (label
== bc_label
[bc
]);
85 if (TREE_USED (label
))
86 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
89 bc_label
[bc
] = DECL_CHAIN (label
);
90 DECL_CHAIN (label
) = NULL_TREE
;
93 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
94 *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
95 TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
96 of gimplify_cilk_spawn. */
99 cilk_cp_gimplify_call_params_in_spawned_fn (tree
*expr_p
, gimple_seq
*pre_p
,
104 cilk_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
105 if (TREE_CODE (*expr_p
) == AGGR_INIT_EXPR
)
106 for (ii
= 0; ii
< aggr_init_expr_nargs (*expr_p
); ii
++)
107 gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p
, ii
), pre_p
, post_p
,
108 is_gimple_reg
, fb_rvalue
);
112 /* Get the LABEL_EXPR to represent a break or continue statement
113 in the current block scope. BC indicates which. */
116 get_bc_label (enum bc_t bc
)
118 tree label
= bc_label
[bc
];
120 /* Mark the label used for finish_bc_block. */
121 TREE_USED (label
) = 1;
125 /* Genericize a TRY_BLOCK. */
128 genericize_try_block (tree
*stmt_p
)
130 tree body
= TRY_STMTS (*stmt_p
);
131 tree cleanup
= TRY_HANDLERS (*stmt_p
);
133 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
136 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
139 genericize_catch_block (tree
*stmt_p
)
141 tree type
= HANDLER_TYPE (*stmt_p
);
142 tree body
= HANDLER_BODY (*stmt_p
);
144 /* FIXME should the caught type go in TREE_TYPE? */
145 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
148 /* A terser interface for building a representation of an exception
152 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
156 /* FIXME should the allowed types go in TREE_TYPE? */
157 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
158 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
160 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
161 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
166 /* Genericize an EH_SPEC_BLOCK by converting it to a
167 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
170 genericize_eh_spec_block (tree
*stmt_p
)
172 tree body
= EH_SPEC_STMTS (*stmt_p
);
173 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
174 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
176 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
177 TREE_NO_WARNING (*stmt_p
) = true;
178 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
181 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
184 genericize_if_stmt (tree
*stmt_p
)
186 tree stmt
, cond
, then_
, else_
;
187 location_t locus
= EXPR_LOCATION (*stmt_p
);
190 cond
= IF_COND (stmt
);
191 then_
= THEN_CLAUSE (stmt
);
192 else_
= ELSE_CLAUSE (stmt
);
195 then_
= build_empty_stmt (locus
);
197 else_
= build_empty_stmt (locus
);
199 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
201 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
204 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
205 if (!EXPR_HAS_LOCATION (stmt
))
206 protected_set_expr_location (stmt
, locus
);
210 /* Build a generic representation of one of the C loop forms. COND is the
211 loop condition or NULL_TREE. BODY is the (possibly compound) statement
212 controlled by the loop. INCR is the increment expression of a for-loop,
213 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
214 evaluated before the loop body as in while and for loops, or after the
215 loop body as in do-while loops. */
218 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
219 tree incr
, bool cond_is_first
, int *walk_subtrees
,
224 tree stmt_list
= NULL
;
226 blab
= begin_bc_block (bc_break
, start_locus
);
227 clab
= begin_bc_block (bc_continue
, start_locus
);
229 protected_set_expr_location (incr
, start_locus
);
231 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
232 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
233 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
236 if (cond
&& TREE_CODE (cond
) != INTEGER_CST
)
238 /* If COND is constant, don't bother building an exit. If it's false,
239 we won't build a loop. If it's true, any exits are in the body. */
240 location_t cloc
= EXPR_LOC_OR_LOC (cond
, start_locus
);
241 exit
= build1_loc (cloc
, GOTO_EXPR
, void_type_node
,
242 get_bc_label (bc_break
));
243 exit
= fold_build3_loc (cloc
, COND_EXPR
, void_type_node
, cond
,
244 build_empty_stmt (cloc
), exit
);
247 if (exit
&& cond_is_first
)
248 append_to_statement_list (exit
, &stmt_list
);
249 append_to_statement_list (body
, &stmt_list
);
250 finish_bc_block (&stmt_list
, bc_continue
, clab
);
251 append_to_statement_list (incr
, &stmt_list
);
252 if (exit
&& !cond_is_first
)
253 append_to_statement_list (exit
, &stmt_list
);
256 stmt_list
= build_empty_stmt (start_locus
);
259 if (cond
&& integer_zerop (cond
))
262 loop
= fold_build3_loc (start_locus
, COND_EXPR
,
263 void_type_node
, cond
, stmt_list
,
264 build_empty_stmt (start_locus
));
269 loop
= build1_loc (start_locus
, LOOP_EXPR
, void_type_node
, stmt_list
);
272 append_to_statement_list (loop
, &stmt_list
);
273 finish_bc_block (&stmt_list
, bc_break
, blab
);
275 stmt_list
= build_empty_stmt (start_locus
);
280 /* Genericize a FOR_STMT node *STMT_P. */
283 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
288 tree init
= FOR_INIT_STMT (stmt
);
292 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
293 append_to_statement_list (init
, &expr
);
296 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
297 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
298 append_to_statement_list (loop
, &expr
);
299 if (expr
== NULL_TREE
)
304 /* Genericize a WHILE_STMT node *STMT_P. */
307 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
310 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
311 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
314 /* Genericize a DO_STMT node *STMT_P. */
317 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
320 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
321 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
324 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
327 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
330 tree break_block
, body
, cond
, type
;
331 location_t stmt_locus
= EXPR_LOCATION (stmt
);
333 break_block
= begin_bc_block (bc_break
, stmt_locus
);
335 body
= SWITCH_STMT_BODY (stmt
);
337 body
= build_empty_stmt (stmt_locus
);
338 cond
= SWITCH_STMT_COND (stmt
);
339 type
= SWITCH_STMT_TYPE (stmt
);
341 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
342 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
343 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
346 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
347 finish_bc_block (stmt_p
, bc_break
, break_block
);
350 /* Genericize a CONTINUE_STMT node *STMT_P. */
353 genericize_continue_stmt (tree
*stmt_p
)
355 tree stmt_list
= NULL
;
356 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
357 tree label
= get_bc_label (bc_continue
);
358 location_t location
= EXPR_LOCATION (*stmt_p
);
359 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
360 append_to_statement_list (pred
, &stmt_list
);
361 append_to_statement_list (jump
, &stmt_list
);
365 /* Genericize a BREAK_STMT node *STMT_P. */
368 genericize_break_stmt (tree
*stmt_p
)
370 tree label
= get_bc_label (bc_break
);
371 location_t location
= EXPR_LOCATION (*stmt_p
);
372 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
375 /* Genericize a OMP_FOR node *STMT_P. */
378 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
381 location_t locus
= EXPR_LOCATION (stmt
);
382 tree clab
= begin_bc_block (bc_continue
, locus
);
384 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
385 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
386 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
387 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
388 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
389 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
392 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
395 /* Hook into the middle of gimplifying an OMP_FOR node. */
397 static enum gimplify_status
398 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
400 tree for_stmt
= *expr_p
;
401 gimple_seq seq
= NULL
;
403 /* Protect ourselves from recursion. */
404 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
406 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
408 gimplify_and_add (for_stmt
, &seq
);
409 gimple_seq_add_seq (pre_p
, seq
);
411 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
416 /* Gimplify an EXPR_STMT node. */
419 gimplify_expr_stmt (tree
*stmt_p
)
421 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
423 if (stmt
== error_mark_node
)
426 /* Gimplification of a statement expression will nullify the
427 statement if all its side effects are moved to *PRE_P and *POST_P.
429 In this case we will not want to emit the gimplified statement.
430 However, we may still want to emit a warning, so we do that before
432 if (stmt
&& warn_unused_value
)
434 if (!TREE_SIDE_EFFECTS (stmt
))
436 if (!IS_EMPTY_STMT (stmt
)
437 && !VOID_TYPE_P (TREE_TYPE (stmt
))
438 && !TREE_NO_WARNING (stmt
))
439 warning (OPT_Wunused_value
, "statement with no effect");
442 warn_if_unused_value (stmt
, input_location
);
445 if (stmt
== NULL_TREE
)
446 stmt
= alloc_stmt_list ();
451 /* Gimplify initialization from an AGGR_INIT_EXPR. */
454 cp_gimplify_init_expr (tree
*expr_p
)
456 tree from
= TREE_OPERAND (*expr_p
, 1);
457 tree to
= TREE_OPERAND (*expr_p
, 0);
460 /* What about code that pulls out the temp and uses it elsewhere? I
461 think that such code never uses the TARGET_EXPR as an initializer. If
462 I'm wrong, we'll abort because the temp won't have any RTL. In that
463 case, I guess we'll need to replace references somehow. */
464 if (TREE_CODE (from
) == TARGET_EXPR
)
465 from
= TARGET_EXPR_INITIAL (from
);
467 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
468 inside the TARGET_EXPR. */
471 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
473 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
474 replace the slot operand with our target.
476 Should we add a target parm to gimplify_expr instead? No, as in this
477 case we want to replace the INIT_EXPR. */
478 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
479 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
481 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
482 AGGR_INIT_EXPR_SLOT (sub
) = to
;
484 VEC_INIT_EXPR_SLOT (sub
) = to
;
487 /* The initialization is now a side-effect, so the container can
490 TREE_TYPE (from
) = void_type_node
;
493 if (cxx_dialect
>= cxx14
&& TREE_CODE (sub
) == CONSTRUCTOR
)
494 /* Handle aggregate NSDMI. */
495 replace_placeholders (sub
, to
);
500 t
= TREE_OPERAND (t
, 1);
505 /* Gimplify a MUST_NOT_THROW_EXPR. */
507 static enum gimplify_status
508 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
511 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
512 tree body
= TREE_OPERAND (stmt
, 0);
513 gimple_seq try_
= NULL
;
514 gimple_seq catch_
= NULL
;
517 gimplify_and_add (body
, &try_
);
518 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
519 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
520 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
522 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
533 /* Return TRUE if an operand (OP) of a given TYPE being copied is
534 really just an empty class copy.
536 Check that the operand has a simple form so that TARGET_EXPRs and
537 non-empty CONSTRUCTORs get reduced properly, and we leave the
538 return slot optimization alone because it isn't a copy. */
541 simple_empty_class_p (tree type
, tree op
)
544 ((TREE_CODE (op
) == COMPOUND_EXPR
545 && simple_empty_class_p (type
, TREE_OPERAND (op
, 1)))
546 || is_gimple_lvalue (op
)
547 || INDIRECT_REF_P (op
)
548 || (TREE_CODE (op
) == CONSTRUCTOR
549 && CONSTRUCTOR_NELTS (op
) == 0
550 && !TREE_CLOBBER_P (op
))
551 || (TREE_CODE (op
) == CALL_EXPR
552 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
553 && is_really_empty_class (type
);
556 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
559 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
561 int saved_stmts_are_full_exprs_p
= 0;
562 enum tree_code code
= TREE_CODE (*expr_p
);
563 enum gimplify_status ret
;
565 if (STATEMENT_CODE_P (code
))
567 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
568 current_stmt_tree ()->stmts_are_full_exprs_p
569 = STMT_IS_FULL_EXPR_P (*expr_p
);
575 *expr_p
= cplus_expand_constant (*expr_p
);
580 simplify_aggr_init_expr (expr_p
);
586 location_t loc
= input_location
;
587 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
588 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
589 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
590 input_location
= EXPR_LOCATION (*expr_p
);
591 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
592 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
594 tf_warning_or_error
);
595 cp_genericize_tree (expr_p
);
597 input_location
= loc
;
602 /* FIXME communicate throw type to back end, probably by moving
603 THROW_EXPR into ../tree.def. */
604 *expr_p
= TREE_OPERAND (*expr_p
, 0);
608 case MUST_NOT_THROW_EXPR
:
609 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
612 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
613 LHS of an assignment might also be involved in the RHS, as in bug
616 if (fn_contains_cilk_spawn_p (cfun
)
617 && cilk_detect_spawn_and_unwrap (expr_p
)
620 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
621 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
623 cp_gimplify_init_expr (expr_p
);
624 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
626 /* Otherwise fall through. */
630 if (fn_contains_cilk_spawn_p (cfun
)
631 && cilk_detect_spawn_and_unwrap (expr_p
)
634 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
635 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
637 /* If the back end isn't clever enough to know that the lhs and rhs
638 types are the same, add an explicit conversion. */
639 tree op0
= TREE_OPERAND (*expr_p
, 0);
640 tree op1
= TREE_OPERAND (*expr_p
, 1);
642 if (!error_operand_p (op0
)
643 && !error_operand_p (op1
)
644 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
645 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
646 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
647 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
648 TREE_TYPE (op0
), op1
);
650 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
))
652 /* Remove any copies of empty classes. Also drop volatile
653 variables on the RHS to avoid infinite recursion from
654 gimplify_expr trying to load the value. */
655 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
656 is_gimple_lvalue
, fb_lvalue
);
657 if (TREE_SIDE_EFFECTS (op1
))
659 if (TREE_THIS_VOLATILE (op1
)
660 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
661 op1
= build_fold_addr_expr (op1
);
663 gimplify_and_add (op1
, pre_p
);
665 *expr_p
= TREE_OPERAND (*expr_p
, 0);
671 case EMPTY_CLASS_EXPR
:
672 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
673 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
678 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
683 genericize_try_block (expr_p
);
688 genericize_catch_block (expr_p
);
693 genericize_eh_spec_block (expr_p
);
712 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
716 gimplify_expr_stmt (expr_p
);
720 case UNARY_PLUS_EXPR
:
722 tree arg
= TREE_OPERAND (*expr_p
, 0);
723 tree type
= TREE_TYPE (*expr_p
);
724 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
730 case CILK_SPAWN_STMT
:
732 (fn_contains_cilk_spawn_p (cfun
)
733 && cilk_detect_spawn_and_unwrap (expr_p
));
735 /* If errors are seen, then just process it as a CALL_EXPR. */
738 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
739 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
742 if (fn_contains_cilk_spawn_p (cfun
)
743 && cilk_detect_spawn_and_unwrap (expr_p
)
746 cilk_cp_gimplify_call_params_in_spawned_fn (expr_p
, pre_p
, post_p
);
747 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
749 /* DR 1030 says that we need to evaluate the elements of an
750 initializer-list in forward order even when it's used as arguments to
751 a constructor. So if the target wants to evaluate them in reverse
752 order and there's more than one argument other than 'this', gimplify
755 if (PUSH_ARGS_REVERSED
&& CALL_EXPR_LIST_INIT_P (*expr_p
)
756 && call_expr_nargs (*expr_p
) > 2)
758 int nargs
= call_expr_nargs (*expr_p
);
759 location_t loc
= EXPR_LOC_OR_LOC (*expr_p
, input_location
);
760 for (int i
= 1; i
< nargs
; ++i
)
762 enum gimplify_status t
763 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
);
771 if (TREE_OPERAND (*expr_p
, 0)
772 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
773 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
775 expr_p
= &TREE_OPERAND (*expr_p
, 0);
776 code
= TREE_CODE (*expr_p
);
777 /* Avoid going through the INIT_EXPR case, which can
778 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
779 goto modify_expr_case
;
784 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
788 /* Restore saved state. */
789 if (STATEMENT_CODE_P (code
))
790 current_stmt_tree ()->stmts_are_full_exprs_p
791 = saved_stmts_are_full_exprs_p
;
797 is_invisiref_parm (const_tree t
)
799 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
800 && DECL_BY_REFERENCE (t
));
803 /* Return true if the uid in both int tree maps are equal. */
806 cxx_int_tree_map_hasher::equal (cxx_int_tree_map
*a
, cxx_int_tree_map
*b
)
808 return (a
->uid
== b
->uid
);
811 /* Hash a UID in a cxx_int_tree_map. */
814 cxx_int_tree_map_hasher::hash (cxx_int_tree_map
*item
)
819 /* A stable comparison routine for use with splay trees and DECLs. */
822 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
827 return DECL_UID (a
) - DECL_UID (b
);
830 /* OpenMP context during genericization. */
832 struct cp_genericize_omp_taskreg
836 struct cp_genericize_omp_taskreg
*outer
;
837 splay_tree variables
;
840 /* Return true if genericization should try to determine if
841 DECL is firstprivate or shared within task regions. */
844 omp_var_to_track (tree decl
)
846 tree type
= TREE_TYPE (decl
);
847 if (is_invisiref_parm (decl
))
848 type
= TREE_TYPE (type
);
849 while (TREE_CODE (type
) == ARRAY_TYPE
)
850 type
= TREE_TYPE (type
);
851 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
853 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
855 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
860 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
863 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
865 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
866 (splay_tree_key
) decl
);
869 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
871 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
872 if (!omp_ctx
->default_shared
)
874 struct cp_genericize_omp_taskreg
*octx
;
876 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
878 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
879 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
881 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
884 if (octx
->is_parallel
)
888 && (TREE_CODE (decl
) == PARM_DECL
889 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
890 && DECL_CONTEXT (decl
) == current_function_decl
)))
891 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
892 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
894 /* DECL is implicitly determined firstprivate in
895 the current task construct. Ensure copy ctor and
896 dtor are instantiated, because during gimplification
897 it will be already too late. */
898 tree type
= TREE_TYPE (decl
);
899 if (is_invisiref_parm (decl
))
900 type
= TREE_TYPE (type
);
901 while (TREE_CODE (type
) == ARRAY_TYPE
)
902 type
= TREE_TYPE (type
);
903 get_copy_ctor (type
, tf_none
);
904 get_dtor (type
, tf_none
);
907 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
911 /* Genericization context. */
913 struct cp_genericize_data
915 hash_set
<tree
> *p_set
;
916 vec
<tree
> bind_expr_stack
;
917 struct cp_genericize_omp_taskreg
*omp_ctx
;
922 /* Perform any pre-gimplification lowering of C++ front end trees to
926 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
929 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
930 hash_set
<tree
> *p_set
= wtd
->p_set
;
932 /* If in an OpenMP context, note var uses. */
933 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
935 || TREE_CODE (stmt
) == PARM_DECL
936 || TREE_CODE (stmt
) == RESULT_DECL
)
937 && omp_var_to_track (stmt
))
938 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
940 if (is_invisiref_parm (stmt
)
941 /* Don't dereference parms in a thunk, pass the references through. */
942 && !(DECL_THUNK_P (current_function_decl
)
943 && TREE_CODE (stmt
) == PARM_DECL
))
945 *stmt_p
= convert_from_reference (stmt
);
950 /* Map block scope extern declarations to visible declarations with the
951 same name and type in outer scopes if any. */
952 if (cp_function_chain
->extern_decl_map
953 && VAR_OR_FUNCTION_DECL_P (stmt
)
954 && DECL_EXTERNAL (stmt
))
956 struct cxx_int_tree_map
*h
, in
;
957 in
.uid
= DECL_UID (stmt
);
958 h
= cp_function_chain
->extern_decl_map
->find_with_hash (&in
, in
.uid
);
967 /* Other than invisiref parms, don't walk the same tree twice. */
968 if (p_set
->contains (stmt
))
974 if (TREE_CODE (stmt
) == ADDR_EXPR
975 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
977 /* If in an OpenMP context, note var uses. */
978 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
979 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
980 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
981 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
984 else if (TREE_CODE (stmt
) == RETURN_EXPR
985 && TREE_OPERAND (stmt
, 0)
986 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
987 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
989 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
990 switch (OMP_CLAUSE_CODE (stmt
))
992 case OMP_CLAUSE_LASTPRIVATE
:
993 /* Don't dereference an invisiref in OpenMP clauses. */
994 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
997 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
998 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
999 cp_genericize_r
, data
, NULL
);
1002 case OMP_CLAUSE_PRIVATE
:
1003 /* Don't dereference an invisiref in OpenMP clauses. */
1004 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1006 else if (wtd
->omp_ctx
!= NULL
)
1008 /* Private clause doesn't cause any references to the
1009 var in outer contexts, avoid calling
1010 omp_cxx_notice_variable for it. */
1011 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1012 wtd
->omp_ctx
= NULL
;
1013 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1019 case OMP_CLAUSE_SHARED
:
1020 case OMP_CLAUSE_FIRSTPRIVATE
:
1021 case OMP_CLAUSE_COPYIN
:
1022 case OMP_CLAUSE_COPYPRIVATE
:
1023 /* Don't dereference an invisiref in OpenMP clauses. */
1024 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1027 case OMP_CLAUSE_REDUCTION
:
1028 /* Don't dereference an invisiref in reduction clause's
1029 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1030 still needs to be genericized. */
1031 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1034 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1035 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1036 cp_genericize_r
, data
, NULL
);
1037 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1038 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1039 cp_genericize_r
, data
, NULL
);
1045 else if (IS_TYPE_OR_DECL_P (stmt
))
1048 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1049 to lower this construct before scanning it, so we need to lower these
1050 before doing anything else. */
1051 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
1052 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1053 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1056 CLEANUP_BODY (stmt
),
1057 CLEANUP_EXPR (stmt
));
1059 else if (TREE_CODE (stmt
) == IF_STMT
)
1061 genericize_if_stmt (stmt_p
);
1062 /* *stmt_p has changed, tail recurse to handle it again. */
1063 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1066 /* COND_EXPR might have incompatible types in branches if one or both
1067 arms are bitfields. Fix it up now. */
1068 else if (TREE_CODE (stmt
) == COND_EXPR
)
1071 = (TREE_OPERAND (stmt
, 1)
1072 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1075 = (TREE_OPERAND (stmt
, 2)
1076 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1079 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1080 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1082 TREE_OPERAND (stmt
, 1)
1083 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1084 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1088 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1089 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1091 TREE_OPERAND (stmt
, 2)
1092 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1093 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1098 else if (TREE_CODE (stmt
) == BIND_EXPR
)
1100 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1103 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1105 && !DECL_EXTERNAL (decl
)
1106 && omp_var_to_track (decl
))
1109 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1110 (splay_tree_key
) decl
);
1112 splay_tree_insert (wtd
->omp_ctx
->variables
,
1113 (splay_tree_key
) decl
,
1115 ? OMP_CLAUSE_DEFAULT_SHARED
1116 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1120 & (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1122 /* The point here is to not sanitize static initializers. */
1123 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1124 wtd
->no_sanitize_p
= true;
1125 for (tree decl
= BIND_EXPR_VARS (stmt
);
1127 decl
= DECL_CHAIN (decl
))
1129 && TREE_STATIC (decl
)
1130 && DECL_INITIAL (decl
))
1131 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1132 wtd
->no_sanitize_p
= no_sanitize_p
;
1134 wtd
->bind_expr_stack
.safe_push (stmt
);
1135 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1136 cp_genericize_r
, data
, NULL
);
1137 wtd
->bind_expr_stack
.pop ();
1140 else if (TREE_CODE (stmt
) == USING_STMT
)
1142 tree block
= NULL_TREE
;
1144 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1145 BLOCK, and append an IMPORTED_DECL to its
1146 BLOCK_VARS chained list. */
1147 if (wtd
->bind_expr_stack
.exists ())
1150 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1151 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1156 tree using_directive
;
1157 gcc_assert (TREE_OPERAND (stmt
, 0));
1159 using_directive
= make_node (IMPORTED_DECL
);
1160 TREE_TYPE (using_directive
) = void_type_node
;
1162 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1163 = TREE_OPERAND (stmt
, 0);
1164 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1165 BLOCK_VARS (block
) = using_directive
;
1167 /* The USING_STMT won't appear in GENERIC. */
1168 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1172 else if (TREE_CODE (stmt
) == DECL_EXPR
1173 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1175 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1176 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1179 else if (TREE_CODE (stmt
) == DECL_EXPR
)
1181 tree d
= DECL_EXPR_DECL (stmt
);
1182 if (TREE_CODE (d
) == VAR_DECL
)
1183 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1185 else if (TREE_CODE (stmt
) == OMP_PARALLEL
|| TREE_CODE (stmt
) == OMP_TASK
)
1187 struct cp_genericize_omp_taskreg omp_ctx
;
1192 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1193 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1194 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1195 omp_ctx
.outer
= wtd
->omp_ctx
;
1196 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1197 wtd
->omp_ctx
= &omp_ctx
;
1198 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1199 switch (OMP_CLAUSE_CODE (c
))
1201 case OMP_CLAUSE_SHARED
:
1202 case OMP_CLAUSE_PRIVATE
:
1203 case OMP_CLAUSE_FIRSTPRIVATE
:
1204 case OMP_CLAUSE_LASTPRIVATE
:
1205 decl
= OMP_CLAUSE_DECL (c
);
1206 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1208 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1211 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1212 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1213 ? OMP_CLAUSE_DEFAULT_SHARED
1214 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1215 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1217 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1219 case OMP_CLAUSE_DEFAULT
:
1220 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1221 omp_ctx
.default_shared
= true;
1225 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1226 wtd
->omp_ctx
= omp_ctx
.outer
;
1227 splay_tree_delete (omp_ctx
.variables
);
1229 else if (TREE_CODE (stmt
) == TRY_BLOCK
)
1232 tree try_block
= wtd
->try_block
;
1233 wtd
->try_block
= stmt
;
1234 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1235 wtd
->try_block
= try_block
;
1236 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1238 else if (TREE_CODE (stmt
) == MUST_NOT_THROW_EXPR
)
1240 /* MUST_NOT_THROW_COND might be something else with TM. */
1241 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1244 tree try_block
= wtd
->try_block
;
1245 wtd
->try_block
= stmt
;
1246 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1247 wtd
->try_block
= try_block
;
1250 else if (TREE_CODE (stmt
) == THROW_EXPR
)
1252 location_t loc
= location_of (stmt
);
1253 if (TREE_NO_WARNING (stmt
))
1255 else if (wtd
->try_block
)
1257 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
1258 && warning_at (loc
, OPT_Wterminate
,
1259 "throw will always call terminate()")
1260 && cxx_dialect
>= cxx11
1261 && DECL_DESTRUCTOR_P (current_function_decl
))
1262 inform (loc
, "in C++11 destructors default to noexcept");
1266 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
1267 && DECL_DESTRUCTOR_P (current_function_decl
)
1268 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
1270 && (get_defaulted_eh_spec (current_function_decl
)
1271 == empty_except_spec
))
1272 warning_at (loc
, OPT_Wc__11_compat
,
1273 "in C++11 this throw will terminate because "
1274 "destructors default to noexcept");
1277 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1278 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1279 else if (TREE_CODE (stmt
) == FOR_STMT
)
1280 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1281 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1282 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1283 else if (TREE_CODE (stmt
) == DO_STMT
)
1284 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1285 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1286 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1287 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1288 genericize_continue_stmt (stmt_p
);
1289 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1290 genericize_break_stmt (stmt_p
);
1291 else if (TREE_CODE (stmt
) == OMP_FOR
1292 || TREE_CODE (stmt
) == OMP_SIMD
1293 || TREE_CODE (stmt
) == OMP_DISTRIBUTE
1294 || TREE_CODE (stmt
) == OMP_TASKLOOP
)
1295 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1296 else if (TREE_CODE (stmt
) == SIZEOF_EXPR
)
1298 if (SIZEOF_EXPR_TYPE_P (stmt
))
1300 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt
, 0)),
1301 SIZEOF_EXPR
, false);
1302 else if (TYPE_P (TREE_OPERAND (stmt
, 0)))
1303 *stmt_p
= cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt
, 0),
1304 SIZEOF_EXPR
, false);
1306 *stmt_p
= cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt
, 0),
1307 SIZEOF_EXPR
, false);
1308 if (*stmt_p
== error_mark_node
)
1309 *stmt_p
= size_one_node
;
1312 else if ((flag_sanitize
1313 & (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1314 && !wtd
->no_sanitize_p
)
1316 if ((flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1317 && TREE_CODE (stmt
) == NOP_EXPR
1318 && TREE_CODE (TREE_TYPE (stmt
)) == REFERENCE_TYPE
)
1319 ubsan_maybe_instrument_reference (stmt
);
1320 else if (TREE_CODE (stmt
) == CALL_EXPR
)
1322 tree fn
= CALL_EXPR_FN (stmt
);
1324 && !error_operand_p (fn
)
1325 && POINTER_TYPE_P (TREE_TYPE (fn
))
1326 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1329 = TREE_CODE (fn
) == ADDR_EXPR
1330 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1331 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1332 if (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1333 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1334 if ((flag_sanitize
& SANITIZE_VPTR
) && !is_ctor
)
1335 cp_ubsan_maybe_instrument_member_call (stmt
);
1340 p_set
->add (*stmt_p
);
1345 /* Lower C++ front end trees to GENERIC in T_P. */
1348 cp_genericize_tree (tree
* t_p
)
1350 struct cp_genericize_data wtd
;
1352 wtd
.p_set
= new hash_set
<tree
>;
1353 wtd
.bind_expr_stack
.create (0);
1355 wtd
.try_block
= NULL_TREE
;
1356 wtd
.no_sanitize_p
= false;
1357 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1359 wtd
.bind_expr_stack
.release ();
1360 if (flag_sanitize
& SANITIZE_VPTR
)
1361 cp_ubsan_instrument_member_accesses (t_p
);
1364 /* If a function that should end with a return in non-void
1365 function doesn't obviously end with return, add ubsan
1366 instrumentation code to verify it at runtime. */
1369 cp_ubsan_maybe_instrument_return (tree fndecl
)
1371 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1372 || DECL_CONSTRUCTOR_P (fndecl
)
1373 || DECL_DESTRUCTOR_P (fndecl
)
1374 || !targetm
.warn_func_return (fndecl
))
1377 tree t
= DECL_SAVED_TREE (fndecl
);
1380 switch (TREE_CODE (t
))
1383 t
= BIND_EXPR_BODY (t
);
1385 case TRY_FINALLY_EXPR
:
1386 t
= TREE_OPERAND (t
, 0);
1388 case STATEMENT_LIST
:
1390 tree_stmt_iterator i
= tsi_last (t
);
1407 t
= DECL_SAVED_TREE (fndecl
);
1408 if (TREE_CODE (t
) == BIND_EXPR
1409 && TREE_CODE (BIND_EXPR_BODY (t
)) == STATEMENT_LIST
)
1411 tree_stmt_iterator i
= tsi_last (BIND_EXPR_BODY (t
));
1412 t
= ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl
));
1413 tsi_link_after (&i
, t
, TSI_NEW_STMT
);
1418 cp_genericize (tree fndecl
)
1422 /* Fix up the types of parms passed by invisible reference. */
1423 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1424 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1426 /* If a function's arguments are copied to create a thunk,
1427 then DECL_BY_REFERENCE will be set -- but the type of the
1428 argument will be a pointer type, so we will never get
1430 gcc_assert (!DECL_BY_REFERENCE (t
));
1431 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
1432 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1433 DECL_BY_REFERENCE (t
) = 1;
1434 TREE_ADDRESSABLE (t
) = 0;
1438 /* Do the same for the return value. */
1439 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1441 t
= DECL_RESULT (fndecl
);
1442 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1443 DECL_BY_REFERENCE (t
) = 1;
1444 TREE_ADDRESSABLE (t
) = 0;
1448 /* Adjust DECL_VALUE_EXPR of the original var. */
1449 tree outer
= outer_curly_brace_block (current_function_decl
);
1453 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1454 if (DECL_NAME (t
) == DECL_NAME (var
)
1455 && DECL_HAS_VALUE_EXPR_P (var
)
1456 && DECL_VALUE_EXPR (var
) == t
)
1458 tree val
= convert_from_reference (t
);
1459 SET_DECL_VALUE_EXPR (var
, val
);
1465 /* If we're a clone, the body is already GIMPLE. */
1466 if (DECL_CLONED_FUNCTION_P (fndecl
))
1469 /* Expand all the array notations here. */
1471 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl
)))
1472 DECL_SAVED_TREE (fndecl
) =
1473 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl
));
1475 /* We do want to see every occurrence of the parms, so we can't just use
1476 walk_tree's hash functionality. */
1477 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
));
1479 if (flag_sanitize
& SANITIZE_RETURN
1480 && do_ubsan_in_current_function ())
1481 cp_ubsan_maybe_instrument_return (fndecl
);
1483 /* Do everything else. */
1484 c_genericize (fndecl
);
1486 gcc_assert (bc_label
[bc_break
] == NULL
);
1487 gcc_assert (bc_label
[bc_continue
] == NULL
);
1490 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1491 NULL if there is in fact nothing to do. ARG2 may be null if FN
1492 actually only takes one argument. */
1495 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1497 tree defparm
, parm
, t
;
1505 nargs
= list_length (DECL_ARGUMENTS (fn
));
1506 argarray
= XALLOCAVEC (tree
, nargs
);
1508 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1510 defparm
= TREE_CHAIN (defparm
);
1512 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
1514 tree inner_type
= TREE_TYPE (arg1
);
1515 tree start1
, end1
, p1
;
1516 tree start2
= NULL
, p2
= NULL
;
1517 tree ret
= NULL
, lab
;
1523 inner_type
= TREE_TYPE (inner_type
);
1524 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
1525 size_zero_node
, NULL
, NULL
);
1527 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
1528 size_zero_node
, NULL
, NULL
);
1530 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
1531 start1
= build_fold_addr_expr_loc (input_location
, start1
);
1533 start2
= build_fold_addr_expr_loc (input_location
, start2
);
1535 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
1536 end1
= fold_build_pointer_plus (start1
, end1
);
1538 p1
= create_tmp_var (TREE_TYPE (start1
));
1539 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
1540 append_to_statement_list (t
, &ret
);
1544 p2
= create_tmp_var (TREE_TYPE (start2
));
1545 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
1546 append_to_statement_list (t
, &ret
);
1549 lab
= create_artificial_label (input_location
);
1550 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1551 append_to_statement_list (t
, &ret
);
1556 /* Handle default arguments. */
1557 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1558 parm
= TREE_CHAIN (parm
), i
++)
1559 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1560 TREE_PURPOSE (parm
), fn
, i
,
1561 tf_warning_or_error
);
1562 t
= build_call_a (fn
, i
, argarray
);
1563 t
= fold_convert (void_type_node
, t
);
1564 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1565 append_to_statement_list (t
, &ret
);
1567 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
1568 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1569 append_to_statement_list (t
, &ret
);
1573 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
1574 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1575 append_to_statement_list (t
, &ret
);
1578 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1579 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1580 append_to_statement_list (t
, &ret
);
1586 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1588 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1589 /* Handle default arguments. */
1590 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1591 parm
= TREE_CHAIN (parm
), i
++)
1592 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1593 TREE_PURPOSE (parm
),
1594 fn
, i
, tf_warning_or_error
);
1595 t
= build_call_a (fn
, i
, argarray
);
1596 t
= fold_convert (void_type_node
, t
);
1597 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1601 /* Return code to initialize DECL with its default constructor, or
1602 NULL if there's nothing to do. */
1605 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
1607 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1611 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1616 /* Return code to initialize DST with a copy constructor from SRC. */
1619 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1621 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1625 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1627 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1632 /* Similarly, except use an assignment operator instead. */
1635 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1637 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1641 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1643 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1648 /* Return code to destroy DECL. */
1651 cxx_omp_clause_dtor (tree clause
, tree decl
)
1653 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1657 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1662 /* True if OpenMP should privatize what this DECL points to rather
1663 than the DECL itself. */
1666 cxx_omp_privatize_by_reference (const_tree decl
)
1668 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1669 || is_invisiref_parm (decl
));
1672 /* Return true if DECL is const qualified var having no mutable member. */
1674 cxx_omp_const_qual_no_mutable (tree decl
)
1676 tree type
= TREE_TYPE (decl
);
1677 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1679 if (!is_invisiref_parm (decl
))
1681 type
= TREE_TYPE (type
);
1683 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1685 /* NVR doesn't preserve const qualification of the
1687 tree outer
= outer_curly_brace_block (current_function_decl
);
1691 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1692 if (DECL_NAME (decl
) == DECL_NAME (var
)
1693 && (TYPE_MAIN_VARIANT (type
)
1694 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1696 if (TYPE_READONLY (TREE_TYPE (var
)))
1697 type
= TREE_TYPE (var
);
1703 if (type
== error_mark_node
)
1706 /* Variables with const-qualified type having no mutable member
1707 are predetermined shared. */
1708 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1714 /* True if OpenMP sharing attribute of DECL is predetermined. */
1716 enum omp_clause_default_kind
1717 cxx_omp_predetermined_sharing (tree decl
)
1719 /* Static data members are predetermined shared. */
1720 if (TREE_STATIC (decl
))
1722 tree ctx
= CP_DECL_CONTEXT (decl
);
1723 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1724 return OMP_CLAUSE_DEFAULT_SHARED
;
1727 /* Const qualified vars having no mutable member are predetermined
1729 if (cxx_omp_const_qual_no_mutable (decl
))
1730 return OMP_CLAUSE_DEFAULT_SHARED
;
1732 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1735 /* Finalize an implicitly determined clause. */
1738 cxx_omp_finish_clause (tree c
, gimple_seq
*)
1740 tree decl
, inner_type
;
1741 bool make_shared
= false;
1743 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1746 decl
= OMP_CLAUSE_DECL (c
);
1747 decl
= require_complete_type (decl
);
1748 inner_type
= TREE_TYPE (decl
);
1749 if (decl
== error_mark_node
)
1751 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1752 inner_type
= TREE_TYPE (inner_type
);
1754 /* We're interested in the base element, not arrays. */
1755 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1756 inner_type
= TREE_TYPE (inner_type
);
1758 /* Check for special function availability by building a call to one.
1759 Save the results, because later we won't be in the right context
1760 for making these queries. */
1762 && CLASS_TYPE_P (inner_type
)
1763 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
1767 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
1770 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1771 disregarded in OpenMP construct, because it is going to be
1772 remapped during OpenMP lowering. SHARED is true if DECL
1773 is going to be shared, false if it is going to be privatized. */
1776 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
1780 && DECL_HAS_VALUE_EXPR_P (decl
)
1781 && DECL_ARTIFICIAL (decl
)
1782 && DECL_LANG_SPECIFIC (decl
)
1783 && DECL_OMP_PRIVATIZED_MEMBER (decl
);