/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
29 #include "stor-layout.h"
31 #include "c-family/c-common.h"
32 #include "tree-iterator.h"
34 #include "hard-reg-set.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
43 #include "splay-tree.h"
45 #include "c-family/c-ubsan.h"
48 #include "gimple-expr.h"
50 /* Forward declarations. */
52 static tree
cp_genericize_r (tree
*, int *, void *);
53 static void cp_genericize_tree (tree
*);
55 /* Local declarations. */
57 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
59 /* Stack of labels which are targets for "break" or "continue",
60 linked through TREE_CHAIN. */
61 static tree bc_label
[2];
63 /* Begin a scope which can be exited by a break or continue statement. BC
66 Just creates a label with location LOCATION and pushes it into the current
70 begin_bc_block (enum bc_t bc
, location_t location
)
72 tree label
= create_artificial_label (location
);
73 DECL_CHAIN (label
) = bc_label
[bc
];
76 LABEL_DECL_BREAK (label
) = true;
78 LABEL_DECL_CONTINUE (label
) = true;
82 /* Finish a scope which can be exited by a break or continue statement.
83 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
84 an expression for the contents of the scope.
86 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
87 BLOCK. Otherwise, just forget the label. */
90 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
92 gcc_assert (label
== bc_label
[bc
]);
94 if (TREE_USED (label
))
95 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
98 bc_label
[bc
] = DECL_CHAIN (label
);
99 DECL_CHAIN (label
) = NULL_TREE
;
102 /* Get the LABEL_EXPR to represent a break or continue statement
103 in the current block scope. BC indicates which. */
106 get_bc_label (enum bc_t bc
)
108 tree label
= bc_label
[bc
];
110 /* Mark the label used for finish_bc_block. */
111 TREE_USED (label
) = 1;
115 /* Genericize a TRY_BLOCK. */
118 genericize_try_block (tree
*stmt_p
)
120 tree body
= TRY_STMTS (*stmt_p
);
121 tree cleanup
= TRY_HANDLERS (*stmt_p
);
123 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
126 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
129 genericize_catch_block (tree
*stmt_p
)
131 tree type
= HANDLER_TYPE (*stmt_p
);
132 tree body
= HANDLER_BODY (*stmt_p
);
134 /* FIXME should the caught type go in TREE_TYPE? */
135 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
138 /* A terser interface for building a representation of an exception
142 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
146 /* FIXME should the allowed types go in TREE_TYPE? */
147 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
148 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
150 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
151 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
156 /* Genericize an EH_SPEC_BLOCK by converting it to a
157 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
160 genericize_eh_spec_block (tree
*stmt_p
)
162 tree body
= EH_SPEC_STMTS (*stmt_p
);
163 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
164 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
166 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
167 TREE_NO_WARNING (*stmt_p
) = true;
168 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
171 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
174 genericize_if_stmt (tree
*stmt_p
)
176 tree stmt
, cond
, then_
, else_
;
177 location_t locus
= EXPR_LOCATION (*stmt_p
);
180 cond
= IF_COND (stmt
);
181 then_
= THEN_CLAUSE (stmt
);
182 else_
= ELSE_CLAUSE (stmt
);
185 then_
= build_empty_stmt (locus
);
187 else_
= build_empty_stmt (locus
);
189 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
191 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
194 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
195 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
196 SET_EXPR_LOCATION (stmt
, locus
);
200 /* Build a generic representation of one of the C loop forms. COND is the
201 loop condition or NULL_TREE. BODY is the (possibly compound) statement
202 controlled by the loop. INCR is the increment expression of a for-loop,
203 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
204 evaluated before the loop body as in while and for loops, or after the
205 loop body as in do-while loops. */
208 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
209 tree incr
, bool cond_is_first
, int *walk_subtrees
,
214 tree stmt_list
= NULL
;
216 blab
= begin_bc_block (bc_break
, start_locus
);
217 clab
= begin_bc_block (bc_continue
, start_locus
);
219 if (incr
&& EXPR_P (incr
))
220 SET_EXPR_LOCATION (incr
, start_locus
);
222 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
223 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
224 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
227 if (cond
&& TREE_CODE (cond
) != INTEGER_CST
)
229 /* If COND is constant, don't bother building an exit. If it's false,
230 we won't build a loop. If it's true, any exits are in the body. */
231 location_t cloc
= EXPR_LOC_OR_LOC (cond
, start_locus
);
232 exit
= build1_loc (cloc
, GOTO_EXPR
, void_type_node
,
233 get_bc_label (bc_break
));
234 exit
= fold_build3_loc (cloc
, COND_EXPR
, void_type_node
, cond
,
235 build_empty_stmt (cloc
), exit
);
238 if (exit
&& cond_is_first
)
239 append_to_statement_list (exit
, &stmt_list
);
240 append_to_statement_list (body
, &stmt_list
);
241 finish_bc_block (&stmt_list
, bc_continue
, clab
);
242 append_to_statement_list (incr
, &stmt_list
);
243 if (exit
&& !cond_is_first
)
244 append_to_statement_list (exit
, &stmt_list
);
247 stmt_list
= build_empty_stmt (start_locus
);
250 if (cond
&& integer_zerop (cond
))
253 loop
= fold_build3_loc (start_locus
, COND_EXPR
,
254 void_type_node
, cond
, stmt_list
,
255 build_empty_stmt (start_locus
));
260 loop
= build1_loc (start_locus
, LOOP_EXPR
, void_type_node
, stmt_list
);
263 append_to_statement_list (loop
, &stmt_list
);
264 finish_bc_block (&stmt_list
, bc_break
, blab
);
266 stmt_list
= build_empty_stmt (start_locus
);
271 /* Genericize a FOR_STMT node *STMT_P. */
274 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
279 tree init
= FOR_INIT_STMT (stmt
);
283 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
284 append_to_statement_list (init
, &expr
);
287 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
288 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
289 append_to_statement_list (loop
, &expr
);
290 if (expr
== NULL_TREE
)
295 /* Genericize a WHILE_STMT node *STMT_P. */
298 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
301 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
302 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
305 /* Genericize a DO_STMT node *STMT_P. */
308 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
311 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
312 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
315 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
318 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
321 tree break_block
, body
, cond
, type
;
322 location_t stmt_locus
= EXPR_LOCATION (stmt
);
324 break_block
= begin_bc_block (bc_break
, stmt_locus
);
326 body
= SWITCH_STMT_BODY (stmt
);
328 body
= build_empty_stmt (stmt_locus
);
329 cond
= SWITCH_STMT_COND (stmt
);
330 type
= SWITCH_STMT_TYPE (stmt
);
332 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
333 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
334 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
337 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
338 finish_bc_block (stmt_p
, bc_break
, break_block
);
341 /* Genericize a CONTINUE_STMT node *STMT_P. */
344 genericize_continue_stmt (tree
*stmt_p
)
346 tree stmt_list
= NULL
;
347 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
348 tree label
= get_bc_label (bc_continue
);
349 location_t location
= EXPR_LOCATION (*stmt_p
);
350 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
351 append_to_statement_list (pred
, &stmt_list
);
352 append_to_statement_list (jump
, &stmt_list
);
356 /* Genericize a BREAK_STMT node *STMT_P. */
359 genericize_break_stmt (tree
*stmt_p
)
361 tree label
= get_bc_label (bc_break
);
362 location_t location
= EXPR_LOCATION (*stmt_p
);
363 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
366 /* Genericize a OMP_FOR node *STMT_P. */
369 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
372 location_t locus
= EXPR_LOCATION (stmt
);
373 tree clab
= begin_bc_block (bc_continue
, locus
);
375 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
376 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
377 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
378 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
379 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
380 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
383 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
386 /* Hook into the middle of gimplifying an OMP_FOR node. */
388 static enum gimplify_status
389 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
391 tree for_stmt
= *expr_p
;
392 gimple_seq seq
= NULL
;
394 /* Protect ourselves from recursion. */
395 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
397 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
399 gimplify_and_add (for_stmt
, &seq
);
400 gimple_seq_add_seq (pre_p
, seq
);
402 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
407 /* Gimplify an EXPR_STMT node. */
410 gimplify_expr_stmt (tree
*stmt_p
)
412 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
414 if (stmt
== error_mark_node
)
417 /* Gimplification of a statement expression will nullify the
418 statement if all its side effects are moved to *PRE_P and *POST_P.
420 In this case we will not want to emit the gimplified statement.
421 However, we may still want to emit a warning, so we do that before
423 if (stmt
&& warn_unused_value
)
425 if (!TREE_SIDE_EFFECTS (stmt
))
427 if (!IS_EMPTY_STMT (stmt
)
428 && !VOID_TYPE_P (TREE_TYPE (stmt
))
429 && !TREE_NO_WARNING (stmt
))
430 warning (OPT_Wunused_value
, "statement with no effect");
433 warn_if_unused_value (stmt
, input_location
);
436 if (stmt
== NULL_TREE
)
437 stmt
= alloc_stmt_list ();
442 /* Gimplify initialization from an AGGR_INIT_EXPR. */
445 cp_gimplify_init_expr (tree
*expr_p
)
447 tree from
= TREE_OPERAND (*expr_p
, 1);
448 tree to
= TREE_OPERAND (*expr_p
, 0);
451 /* What about code that pulls out the temp and uses it elsewhere? I
452 think that such code never uses the TARGET_EXPR as an initializer. If
453 I'm wrong, we'll abort because the temp won't have any RTL. In that
454 case, I guess we'll need to replace references somehow. */
455 if (TREE_CODE (from
) == TARGET_EXPR
)
456 from
= TARGET_EXPR_INITIAL (from
);
458 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
459 inside the TARGET_EXPR. */
462 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
464 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
465 replace the slot operand with our target.
467 Should we add a target parm to gimplify_expr instead? No, as in this
468 case we want to replace the INIT_EXPR. */
469 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
470 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
472 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
473 AGGR_INIT_EXPR_SLOT (sub
) = to
;
475 VEC_INIT_EXPR_SLOT (sub
) = to
;
478 /* The initialization is now a side-effect, so the container can
481 TREE_TYPE (from
) = void_type_node
;
484 if (cxx_dialect
>= cxx14
&& TREE_CODE (sub
) == CONSTRUCTOR
)
485 /* Handle aggregate NSDMI. */
486 replace_placeholders (sub
, to
);
491 t
= TREE_OPERAND (t
, 1);
496 /* Gimplify a MUST_NOT_THROW_EXPR. */
498 static enum gimplify_status
499 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
502 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
503 tree body
= TREE_OPERAND (stmt
, 0);
504 gimple_seq try_
= NULL
;
505 gimple_seq catch_
= NULL
;
508 gimplify_and_add (body
, &try_
);
509 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
510 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
511 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
513 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
524 /* Return TRUE if an operand (OP) of a given TYPE being copied is
525 really just an empty class copy.
527 Check that the operand has a simple form so that TARGET_EXPRs and
528 non-empty CONSTRUCTORs get reduced properly, and we leave the
529 return slot optimization alone because it isn't a copy. */
532 simple_empty_class_p (tree type
, tree op
)
535 ((TREE_CODE (op
) == COMPOUND_EXPR
536 && simple_empty_class_p (type
, TREE_OPERAND (op
, 1)))
537 || is_gimple_lvalue (op
)
538 || INDIRECT_REF_P (op
)
539 || (TREE_CODE (op
) == CONSTRUCTOR
540 && CONSTRUCTOR_NELTS (op
) == 0
541 && !TREE_CLOBBER_P (op
))
542 || (TREE_CODE (op
) == CALL_EXPR
543 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
544 && is_really_empty_class (type
);
547 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
550 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
552 int saved_stmts_are_full_exprs_p
= 0;
553 enum tree_code code
= TREE_CODE (*expr_p
);
554 enum gimplify_status ret
;
556 if (STATEMENT_CODE_P (code
))
558 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
559 current_stmt_tree ()->stmts_are_full_exprs_p
560 = STMT_IS_FULL_EXPR_P (*expr_p
);
566 *expr_p
= cplus_expand_constant (*expr_p
);
571 simplify_aggr_init_expr (expr_p
);
577 location_t loc
= input_location
;
578 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
579 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
580 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
581 input_location
= EXPR_LOCATION (*expr_p
);
582 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
583 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
585 tf_warning_or_error
);
586 cp_genericize_tree (expr_p
);
588 input_location
= loc
;
593 /* FIXME communicate throw type to back end, probably by moving
594 THROW_EXPR into ../tree.def. */
595 *expr_p
= TREE_OPERAND (*expr_p
, 0);
599 case MUST_NOT_THROW_EXPR
:
600 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
603 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
604 LHS of an assignment might also be involved in the RHS, as in bug
607 if (fn_contains_cilk_spawn_p (cfun
)
608 && cilk_detect_spawn_and_unwrap (expr_p
)
610 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
611 cp_gimplify_init_expr (expr_p
);
612 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
614 /* Otherwise fall through. */
618 if (fn_contains_cilk_spawn_p (cfun
)
619 && cilk_detect_spawn_and_unwrap (expr_p
)
621 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
623 /* If the back end isn't clever enough to know that the lhs and rhs
624 types are the same, add an explicit conversion. */
625 tree op0
= TREE_OPERAND (*expr_p
, 0);
626 tree op1
= TREE_OPERAND (*expr_p
, 1);
628 if (!error_operand_p (op0
)
629 && !error_operand_p (op1
)
630 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
631 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
632 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
633 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
634 TREE_TYPE (op0
), op1
);
636 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
))
638 /* Remove any copies of empty classes. Also drop volatile
639 variables on the RHS to avoid infinite recursion from
640 gimplify_expr trying to load the value. */
641 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
642 is_gimple_lvalue
, fb_lvalue
);
643 if (TREE_SIDE_EFFECTS (op1
))
645 if (TREE_THIS_VOLATILE (op1
)
646 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
647 op1
= build_fold_addr_expr (op1
);
649 gimplify_and_add (op1
, pre_p
);
651 *expr_p
= TREE_OPERAND (*expr_p
, 0);
657 case EMPTY_CLASS_EXPR
:
658 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
659 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
664 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
669 genericize_try_block (expr_p
);
674 genericize_catch_block (expr_p
);
679 genericize_eh_spec_block (expr_p
);
697 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
701 gimplify_expr_stmt (expr_p
);
705 case UNARY_PLUS_EXPR
:
707 tree arg
= TREE_OPERAND (*expr_p
, 0);
708 tree type
= TREE_TYPE (*expr_p
);
709 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
715 case CILK_SPAWN_STMT
:
717 (fn_contains_cilk_spawn_p (cfun
)
718 && cilk_detect_spawn_and_unwrap (expr_p
));
720 /* If errors are seen, then just process it as a CALL_EXPR. */
722 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
725 if (fn_contains_cilk_spawn_p (cfun
)
726 && cilk_detect_spawn_and_unwrap (expr_p
)
728 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
730 /* DR 1030 says that we need to evaluate the elements of an
731 initializer-list in forward order even when it's used as arguments to
732 a constructor. So if the target wants to evaluate them in reverse
733 order and there's more than one argument other than 'this', gimplify
736 if (PUSH_ARGS_REVERSED
&& CALL_EXPR_LIST_INIT_P (*expr_p
)
737 && call_expr_nargs (*expr_p
) > 2)
739 int nargs
= call_expr_nargs (*expr_p
);
740 location_t loc
= EXPR_LOC_OR_LOC (*expr_p
, input_location
);
741 for (int i
= 1; i
< nargs
; ++i
)
743 enum gimplify_status t
744 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
);
752 if (TREE_OPERAND (*expr_p
, 0)
753 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
754 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
756 expr_p
= &TREE_OPERAND (*expr_p
, 0);
757 code
= TREE_CODE (*expr_p
);
758 /* Avoid going through the INIT_EXPR case, which can
759 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
760 goto modify_expr_case
;
765 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
769 /* Restore saved state. */
770 if (STATEMENT_CODE_P (code
))
771 current_stmt_tree ()->stmts_are_full_exprs_p
772 = saved_stmts_are_full_exprs_p
;
778 is_invisiref_parm (const_tree t
)
780 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
781 && DECL_BY_REFERENCE (t
));
784 /* Return true if the uid in both int tree maps are equal. */
787 cxx_int_tree_map_hasher::equal (cxx_int_tree_map
*a
, cxx_int_tree_map
*b
)
789 return (a
->uid
== b
->uid
);
792 /* Hash a UID in a cxx_int_tree_map. */
795 cxx_int_tree_map_hasher::hash (cxx_int_tree_map
*item
)
800 /* A stable comparison routine for use with splay trees and DECLs. */
803 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
808 return DECL_UID (a
) - DECL_UID (b
);
811 /* OpenMP context during genericization. */
813 struct cp_genericize_omp_taskreg
817 struct cp_genericize_omp_taskreg
*outer
;
818 splay_tree variables
;
821 /* Return true if genericization should try to determine if
822 DECL is firstprivate or shared within task regions. */
825 omp_var_to_track (tree decl
)
827 tree type
= TREE_TYPE (decl
);
828 if (is_invisiref_parm (decl
))
829 type
= TREE_TYPE (type
);
830 while (TREE_CODE (type
) == ARRAY_TYPE
)
831 type
= TREE_TYPE (type
);
832 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
834 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
836 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
841 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
844 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
846 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
847 (splay_tree_key
) decl
);
850 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
852 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
853 if (!omp_ctx
->default_shared
)
855 struct cp_genericize_omp_taskreg
*octx
;
857 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
859 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
860 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
862 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
865 if (octx
->is_parallel
)
869 && (TREE_CODE (decl
) == PARM_DECL
870 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
871 && DECL_CONTEXT (decl
) == current_function_decl
)))
872 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
873 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
875 /* DECL is implicitly determined firstprivate in
876 the current task construct. Ensure copy ctor and
877 dtor are instantiated, because during gimplification
878 it will be already too late. */
879 tree type
= TREE_TYPE (decl
);
880 if (is_invisiref_parm (decl
))
881 type
= TREE_TYPE (type
);
882 while (TREE_CODE (type
) == ARRAY_TYPE
)
883 type
= TREE_TYPE (type
);
884 get_copy_ctor (type
, tf_none
);
885 get_dtor (type
, tf_none
);
888 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
892 /* Genericization context. */
894 struct cp_genericize_data
896 hash_set
<tree
> *p_set
;
897 vec
<tree
> bind_expr_stack
;
898 struct cp_genericize_omp_taskreg
*omp_ctx
;
903 /* Perform any pre-gimplification lowering of C++ front end trees to
907 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
910 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
911 hash_set
<tree
> *p_set
= wtd
->p_set
;
913 /* If in an OpenMP context, note var uses. */
914 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
916 || TREE_CODE (stmt
) == PARM_DECL
917 || TREE_CODE (stmt
) == RESULT_DECL
)
918 && omp_var_to_track (stmt
))
919 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
921 if (is_invisiref_parm (stmt
)
922 /* Don't dereference parms in a thunk, pass the references through. */
923 && !(DECL_THUNK_P (current_function_decl
)
924 && TREE_CODE (stmt
) == PARM_DECL
))
926 *stmt_p
= convert_from_reference (stmt
);
931 /* Map block scope extern declarations to visible declarations with the
932 same name and type in outer scopes if any. */
933 if (cp_function_chain
->extern_decl_map
934 && VAR_OR_FUNCTION_DECL_P (stmt
)
935 && DECL_EXTERNAL (stmt
))
937 struct cxx_int_tree_map
*h
, in
;
938 in
.uid
= DECL_UID (stmt
);
939 h
= cp_function_chain
->extern_decl_map
->find_with_hash (&in
, in
.uid
);
948 /* Other than invisiref parms, don't walk the same tree twice. */
949 if (p_set
->contains (stmt
))
955 if (TREE_CODE (stmt
) == ADDR_EXPR
956 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
958 /* If in an OpenMP context, note var uses. */
959 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
960 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
961 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
962 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
965 else if (TREE_CODE (stmt
) == RETURN_EXPR
966 && TREE_OPERAND (stmt
, 0)
967 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
968 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
970 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
971 switch (OMP_CLAUSE_CODE (stmt
))
973 case OMP_CLAUSE_LASTPRIVATE
:
974 /* Don't dereference an invisiref in OpenMP clauses. */
975 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
978 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
979 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
980 cp_genericize_r
, data
, NULL
);
983 case OMP_CLAUSE_PRIVATE
:
984 /* Don't dereference an invisiref in OpenMP clauses. */
985 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
987 else if (wtd
->omp_ctx
!= NULL
)
989 /* Private clause doesn't cause any references to the
990 var in outer contexts, avoid calling
991 omp_cxx_notice_variable for it. */
992 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
994 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1000 case OMP_CLAUSE_SHARED
:
1001 case OMP_CLAUSE_FIRSTPRIVATE
:
1002 case OMP_CLAUSE_COPYIN
:
1003 case OMP_CLAUSE_COPYPRIVATE
:
1004 /* Don't dereference an invisiref in OpenMP clauses. */
1005 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1008 case OMP_CLAUSE_REDUCTION
:
1009 /* Don't dereference an invisiref in reduction clause's
1010 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1011 still needs to be genericized. */
1012 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1015 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1016 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1017 cp_genericize_r
, data
, NULL
);
1018 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1019 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1020 cp_genericize_r
, data
, NULL
);
1026 else if (IS_TYPE_OR_DECL_P (stmt
))
1029 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1030 to lower this construct before scanning it, so we need to lower these
1031 before doing anything else. */
1032 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
1033 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1034 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1037 CLEANUP_BODY (stmt
),
1038 CLEANUP_EXPR (stmt
));
1040 else if (TREE_CODE (stmt
) == IF_STMT
)
1042 genericize_if_stmt (stmt_p
);
1043 /* *stmt_p has changed, tail recurse to handle it again. */
1044 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1047 /* COND_EXPR might have incompatible types in branches if one or both
1048 arms are bitfields. Fix it up now. */
1049 else if (TREE_CODE (stmt
) == COND_EXPR
)
1052 = (TREE_OPERAND (stmt
, 1)
1053 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1056 = (TREE_OPERAND (stmt
, 2)
1057 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1060 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1061 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1063 TREE_OPERAND (stmt
, 1)
1064 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1065 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1069 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1070 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1072 TREE_OPERAND (stmt
, 2)
1073 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1074 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1079 else if (TREE_CODE (stmt
) == BIND_EXPR
)
1081 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1084 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1086 && !DECL_EXTERNAL (decl
)
1087 && omp_var_to_track (decl
))
1090 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1091 (splay_tree_key
) decl
);
1093 splay_tree_insert (wtd
->omp_ctx
->variables
,
1094 (splay_tree_key
) decl
,
1096 ? OMP_CLAUSE_DEFAULT_SHARED
1097 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1101 & (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1103 /* The point here is to not sanitize static initializers. */
1104 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1105 wtd
->no_sanitize_p
= true;
1106 for (tree decl
= BIND_EXPR_VARS (stmt
);
1108 decl
= DECL_CHAIN (decl
))
1110 && TREE_STATIC (decl
)
1111 && DECL_INITIAL (decl
))
1112 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1113 wtd
->no_sanitize_p
= no_sanitize_p
;
1115 wtd
->bind_expr_stack
.safe_push (stmt
);
1116 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1117 cp_genericize_r
, data
, NULL
);
1118 wtd
->bind_expr_stack
.pop ();
1121 else if (TREE_CODE (stmt
) == USING_STMT
)
1123 tree block
= NULL_TREE
;
1125 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1126 BLOCK, and append an IMPORTED_DECL to its
1127 BLOCK_VARS chained list. */
1128 if (wtd
->bind_expr_stack
.exists ())
1131 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1132 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1137 tree using_directive
;
1138 gcc_assert (TREE_OPERAND (stmt
, 0));
1140 using_directive
= make_node (IMPORTED_DECL
);
1141 TREE_TYPE (using_directive
) = void_type_node
;
1143 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1144 = TREE_OPERAND (stmt
, 0);
1145 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1146 BLOCK_VARS (block
) = using_directive
;
1148 /* The USING_STMT won't appear in GENERIC. */
1149 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1153 else if (TREE_CODE (stmt
) == DECL_EXPR
1154 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1156 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1157 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1160 else if (TREE_CODE (stmt
) == DECL_EXPR
)
1162 tree d
= DECL_EXPR_DECL (stmt
);
1163 if (TREE_CODE (d
) == VAR_DECL
)
1164 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1166 else if (TREE_CODE (stmt
) == OMP_PARALLEL
|| TREE_CODE (stmt
) == OMP_TASK
)
1168 struct cp_genericize_omp_taskreg omp_ctx
;
1173 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1174 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1175 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1176 omp_ctx
.outer
= wtd
->omp_ctx
;
1177 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1178 wtd
->omp_ctx
= &omp_ctx
;
1179 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1180 switch (OMP_CLAUSE_CODE (c
))
1182 case OMP_CLAUSE_SHARED
:
1183 case OMP_CLAUSE_PRIVATE
:
1184 case OMP_CLAUSE_FIRSTPRIVATE
:
1185 case OMP_CLAUSE_LASTPRIVATE
:
1186 decl
= OMP_CLAUSE_DECL (c
);
1187 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1189 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1192 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1193 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1194 ? OMP_CLAUSE_DEFAULT_SHARED
1195 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1196 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1198 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1200 case OMP_CLAUSE_DEFAULT
:
1201 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1202 omp_ctx
.default_shared
= true;
1206 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1207 wtd
->omp_ctx
= omp_ctx
.outer
;
1208 splay_tree_delete (omp_ctx
.variables
);
1210 else if (TREE_CODE (stmt
) == TRY_BLOCK
)
1213 tree try_block
= wtd
->try_block
;
1214 wtd
->try_block
= stmt
;
1215 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1216 wtd
->try_block
= try_block
;
1217 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1219 else if (TREE_CODE (stmt
) == MUST_NOT_THROW_EXPR
)
1221 /* MUST_NOT_THROW_COND might be something else with TM. */
1222 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1225 tree try_block
= wtd
->try_block
;
1226 wtd
->try_block
= stmt
;
1227 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1228 wtd
->try_block
= try_block
;
1231 else if (TREE_CODE (stmt
) == THROW_EXPR
)
1233 location_t loc
= location_of (stmt
);
1234 if (TREE_NO_WARNING (stmt
))
1236 else if (wtd
->try_block
)
1238 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
1239 && warning_at (loc
, OPT_Wterminate
,
1240 "throw will always call terminate()")
1241 && cxx_dialect
>= cxx11
1242 && DECL_DESTRUCTOR_P (current_function_decl
))
1243 inform (loc
, "in C++11 destructors default to noexcept");
1247 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
1248 && DECL_DESTRUCTOR_P (current_function_decl
)
1249 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
1251 && (get_defaulted_eh_spec (current_function_decl
)
1252 == empty_except_spec
))
1253 warning_at (loc
, OPT_Wc__11_compat
,
1254 "in C++11 this throw will terminate because "
1255 "destructors default to noexcept");
1258 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1259 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1260 else if (TREE_CODE (stmt
) == FOR_STMT
)
1261 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1262 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1263 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1264 else if (TREE_CODE (stmt
) == DO_STMT
)
1265 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1266 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1267 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1268 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1269 genericize_continue_stmt (stmt_p
);
1270 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1271 genericize_break_stmt (stmt_p
);
1272 else if (TREE_CODE (stmt
) == OMP_FOR
1273 || TREE_CODE (stmt
) == OMP_SIMD
1274 || TREE_CODE (stmt
) == OMP_DISTRIBUTE
)
1275 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1276 else if (TREE_CODE (stmt
) == SIZEOF_EXPR
)
1278 if (SIZEOF_EXPR_TYPE_P (stmt
))
1280 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt
, 0)),
1281 SIZEOF_EXPR
, false);
1282 else if (TYPE_P (TREE_OPERAND (stmt
, 0)))
1283 *stmt_p
= cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt
, 0),
1284 SIZEOF_EXPR
, false);
1286 *stmt_p
= cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt
, 0),
1287 SIZEOF_EXPR
, false);
1288 if (*stmt_p
== error_mark_node
)
1289 *stmt_p
= size_one_node
;
1292 else if ((flag_sanitize
1293 & (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1294 && !wtd
->no_sanitize_p
)
1296 if ((flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1297 && TREE_CODE (stmt
) == NOP_EXPR
1298 && TREE_CODE (TREE_TYPE (stmt
)) == REFERENCE_TYPE
)
1299 ubsan_maybe_instrument_reference (stmt
);
1300 else if (TREE_CODE (stmt
) == CALL_EXPR
)
1302 tree fn
= CALL_EXPR_FN (stmt
);
1304 && !error_operand_p (fn
)
1305 && POINTER_TYPE_P (TREE_TYPE (fn
))
1306 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1309 = TREE_CODE (fn
) == ADDR_EXPR
1310 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1311 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1312 if (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1313 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1314 if ((flag_sanitize
& SANITIZE_VPTR
) && !is_ctor
)
1315 cp_ubsan_maybe_instrument_member_call (stmt
);
1320 p_set
->add (*stmt_p
);
1325 /* Lower C++ front end trees to GENERIC in T_P. */
1328 cp_genericize_tree (tree
* t_p
)
1330 struct cp_genericize_data wtd
;
1332 wtd
.p_set
= new hash_set
<tree
>;
1333 wtd
.bind_expr_stack
.create (0);
1335 wtd
.try_block
= NULL_TREE
;
1336 wtd
.no_sanitize_p
= false;
1337 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1339 wtd
.bind_expr_stack
.release ();
1340 if (flag_sanitize
& SANITIZE_VPTR
)
1341 cp_ubsan_instrument_member_accesses (t_p
);
1344 /* If a function that should end with a return in non-void
1345 function doesn't obviously end with return, add ubsan
1346 instrumentation code to verify it at runtime. */
1349 cp_ubsan_maybe_instrument_return (tree fndecl
)
1351 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1352 || DECL_CONSTRUCTOR_P (fndecl
)
1353 || DECL_DESTRUCTOR_P (fndecl
)
1354 || !targetm
.warn_func_return (fndecl
))
1357 tree t
= DECL_SAVED_TREE (fndecl
);
1360 switch (TREE_CODE (t
))
1363 t
= BIND_EXPR_BODY (t
);
1365 case TRY_FINALLY_EXPR
:
1366 t
= TREE_OPERAND (t
, 0);
1368 case STATEMENT_LIST
:
1370 tree_stmt_iterator i
= tsi_last (t
);
1387 t
= DECL_SAVED_TREE (fndecl
);
1388 if (TREE_CODE (t
) == BIND_EXPR
1389 && TREE_CODE (BIND_EXPR_BODY (t
)) == STATEMENT_LIST
)
1391 tree_stmt_iterator i
= tsi_last (BIND_EXPR_BODY (t
));
1392 t
= ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl
));
1393 tsi_link_after (&i
, t
, TSI_NEW_STMT
);
1398 cp_genericize (tree fndecl
)
1402 /* Fix up the types of parms passed by invisible reference. */
1403 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1404 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1406 /* If a function's arguments are copied to create a thunk,
1407 then DECL_BY_REFERENCE will be set -- but the type of the
1408 argument will be a pointer type, so we will never get
1410 gcc_assert (!DECL_BY_REFERENCE (t
));
1411 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
1412 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1413 DECL_BY_REFERENCE (t
) = 1;
1414 TREE_ADDRESSABLE (t
) = 0;
1418 /* Do the same for the return value. */
1419 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1421 t
= DECL_RESULT (fndecl
);
1422 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1423 DECL_BY_REFERENCE (t
) = 1;
1424 TREE_ADDRESSABLE (t
) = 0;
1428 /* Adjust DECL_VALUE_EXPR of the original var. */
1429 tree outer
= outer_curly_brace_block (current_function_decl
);
1433 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1434 if (DECL_NAME (t
) == DECL_NAME (var
)
1435 && DECL_HAS_VALUE_EXPR_P (var
)
1436 && DECL_VALUE_EXPR (var
) == t
)
1438 tree val
= convert_from_reference (t
);
1439 SET_DECL_VALUE_EXPR (var
, val
);
1445 /* If we're a clone, the body is already GIMPLE. */
1446 if (DECL_CLONED_FUNCTION_P (fndecl
))
1449 /* Expand all the array notations here. */
1451 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl
)))
1452 DECL_SAVED_TREE (fndecl
) =
1453 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl
));
1455 /* We do want to see every occurrence of the parms, so we can't just use
1456 walk_tree's hash functionality. */
1457 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
));
1459 if (flag_sanitize
& SANITIZE_RETURN
1460 && do_ubsan_in_current_function ())
1461 cp_ubsan_maybe_instrument_return (fndecl
);
1463 /* Do everything else. */
1464 c_genericize (fndecl
);
1466 gcc_assert (bc_label
[bc_break
] == NULL
);
1467 gcc_assert (bc_label
[bc_continue
] == NULL
);
1470 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1471 NULL if there is in fact nothing to do. ARG2 may be null if FN
1472 actually only takes one argument. */
1475 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1477 tree defparm
, parm
, t
;
1485 nargs
= list_length (DECL_ARGUMENTS (fn
));
1486 argarray
= XALLOCAVEC (tree
, nargs
);
1488 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1490 defparm
= TREE_CHAIN (defparm
);
1492 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
1494 tree inner_type
= TREE_TYPE (arg1
);
1495 tree start1
, end1
, p1
;
1496 tree start2
= NULL
, p2
= NULL
;
1497 tree ret
= NULL
, lab
;
1503 inner_type
= TREE_TYPE (inner_type
);
1504 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
1505 size_zero_node
, NULL
, NULL
);
1507 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
1508 size_zero_node
, NULL
, NULL
);
1510 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
1511 start1
= build_fold_addr_expr_loc (input_location
, start1
);
1513 start2
= build_fold_addr_expr_loc (input_location
, start2
);
1515 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
1516 end1
= fold_build_pointer_plus (start1
, end1
);
1518 p1
= create_tmp_var (TREE_TYPE (start1
));
1519 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
1520 append_to_statement_list (t
, &ret
);
1524 p2
= create_tmp_var (TREE_TYPE (start2
));
1525 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
1526 append_to_statement_list (t
, &ret
);
1529 lab
= create_artificial_label (input_location
);
1530 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1531 append_to_statement_list (t
, &ret
);
1536 /* Handle default arguments. */
1537 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1538 parm
= TREE_CHAIN (parm
), i
++)
1539 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1540 TREE_PURPOSE (parm
), fn
, i
,
1541 tf_warning_or_error
);
1542 t
= build_call_a (fn
, i
, argarray
);
1543 t
= fold_convert (void_type_node
, t
);
1544 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1545 append_to_statement_list (t
, &ret
);
1547 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
1548 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1549 append_to_statement_list (t
, &ret
);
1553 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
1554 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1555 append_to_statement_list (t
, &ret
);
1558 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1559 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1560 append_to_statement_list (t
, &ret
);
1566 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1568 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1569 /* Handle default arguments. */
1570 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1571 parm
= TREE_CHAIN (parm
), i
++)
1572 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1573 TREE_PURPOSE (parm
),
1574 fn
, i
, tf_warning_or_error
);
1575 t
= build_call_a (fn
, i
, argarray
);
1576 t
= fold_convert (void_type_node
, t
);
1577 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1581 /* Return code to initialize DECL with its default constructor, or
1582 NULL if there's nothing to do. */
1585 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
1587 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1591 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1596 /* Return code to initialize DST with a copy constructor from SRC. */
1599 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1601 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1605 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1607 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1612 /* Similarly, except use an assignment operator instead. */
1615 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1617 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1621 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1623 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1628 /* Return code to destroy DECL. */
1631 cxx_omp_clause_dtor (tree clause
, tree decl
)
1633 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1637 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1642 /* True if OpenMP should privatize what this DECL points to rather
1643 than the DECL itself. */
1646 cxx_omp_privatize_by_reference (const_tree decl
)
1648 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1649 || is_invisiref_parm (decl
));
1652 /* Return true if DECL is const qualified var having no mutable member. */
1654 cxx_omp_const_qual_no_mutable (tree decl
)
1656 tree type
= TREE_TYPE (decl
);
1657 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1659 if (!is_invisiref_parm (decl
))
1661 type
= TREE_TYPE (type
);
1663 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1665 /* NVR doesn't preserve const qualification of the
1667 tree outer
= outer_curly_brace_block (current_function_decl
);
1671 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1672 if (DECL_NAME (decl
) == DECL_NAME (var
)
1673 && (TYPE_MAIN_VARIANT (type
)
1674 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1676 if (TYPE_READONLY (TREE_TYPE (var
)))
1677 type
= TREE_TYPE (var
);
1683 if (type
== error_mark_node
)
1686 /* Variables with const-qualified type having no mutable member
1687 are predetermined shared. */
1688 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1694 /* True if OpenMP sharing attribute of DECL is predetermined. */
1696 enum omp_clause_default_kind
1697 cxx_omp_predetermined_sharing (tree decl
)
1699 /* Static data members are predetermined shared. */
1700 if (TREE_STATIC (decl
))
1702 tree ctx
= CP_DECL_CONTEXT (decl
);
1703 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1704 return OMP_CLAUSE_DEFAULT_SHARED
;
1707 /* Const qualified vars having no mutable member are predetermined
1709 if (cxx_omp_const_qual_no_mutable (decl
))
1710 return OMP_CLAUSE_DEFAULT_SHARED
;
1712 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1715 /* Finalize an implicitly determined clause. */
1718 cxx_omp_finish_clause (tree c
, gimple_seq
*)
1720 tree decl
, inner_type
;
1721 bool make_shared
= false;
1723 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1726 decl
= OMP_CLAUSE_DECL (c
);
1727 decl
= require_complete_type (decl
);
1728 inner_type
= TREE_TYPE (decl
);
1729 if (decl
== error_mark_node
)
1731 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1733 if (is_invisiref_parm (decl
))
1734 inner_type
= TREE_TYPE (inner_type
);
1737 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1743 /* We're interested in the base element, not arrays. */
1744 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1745 inner_type
= TREE_TYPE (inner_type
);
1747 /* Check for special function availability by building a call to one.
1748 Save the results, because later we won't be in the right context
1749 for making these queries. */
1751 && CLASS_TYPE_P (inner_type
)
1752 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
1756 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;