1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "stor-layout.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
31 #include "basic-block.h"
32 #include "tree-ssa-alias.h"
33 #include "internal-fn.h"
34 #include "gimple-expr.h"
40 #include "splay-tree.h"
42 #include "c-family/c-ubsan.h"
/* NOTE(review): this text is a line-mangled extraction (statements split,
   original line numbers fused in); tokens kept verbatim.  */
/* cp_genericize_r is the walk_tree callback used throughout this file via
   cp_walk_tree; declared early because the genericize_* helpers recurse
   through it before its definition.  */
45 /* Forward declarations. */
47 static tree
cp_genericize_r (tree
*, int *, void *);
48 static void cp_genericize_tree (tree
*);
/* bc_t selects which kind of jump target is being tracked; it indexes
   bc_label below (bc_break = 0, bc_continue = 1).  */
50 /* Local declarations. */
52 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
/* The per-kind label stacks are pushed in begin_bc_block (via DECL_CHAIN)
   and popped in finish_bc_block.  */
54 /* Stack of labels which are targets for "break" or "continue",
55 linked through TREE_CHAIN. */
56 static tree bc_label
[2];
/* Creates an artificial label at LOCATION and chains it onto bc_label[bc].
   NOTE(review): extraction dropped several original lines here (return type,
   braces, the push of LABEL onto bc_label and the return) — confirm against
   the upstream file before relying on this text.  */
58 /* Begin a scope which can be exited by a break or continue statement. BC
61 Just creates a label with location LOCATION and pushes it into the current
65 begin_bc_block (enum bc_t bc
, location_t location
)
67 tree label
= create_artificial_label (location
);
68 DECL_CHAIN (label
) = bc_label
[bc
];
/* Pops LABEL off bc_label[bc] (asserting it is on top) and, if the label was
   marked used by get_bc_label, appends a LABEL_EXPR for it to *BLOCK.
   NOTE(review): extraction dropped lines here (e.g. the second argument of
   the append_to_statement_list call) — reconcile with upstream.  */
73 /* Finish a scope which can be exited by a break or continue statement.
74 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
75 an expression for the contents of the scope.
77 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
78 BLOCK. Otherwise, just forget the label. */
81 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
83 gcc_assert (label
== bc_label
[bc
]);
85 if (TREE_USED (label
))
86 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
89 bc_label
[bc
] = DECL_CHAIN (label
);
90 DECL_CHAIN (label
) = NULL_TREE
;
/* Returns the innermost break/continue label bc_label[bc], first setting
   TREE_USED so finish_bc_block knows to emit the LABEL_EXPR.
   NOTE(review): the return statement was dropped by the extraction.  */
93 /* Get the LABEL_EXPR to represent a break or continue statement
94 in the current block scope. BC indicates which. */
97 get_bc_label (enum bc_t bc
)
99 tree label
= bc_label
[bc
];
101 /* Mark the label used for finish_bc_block. */
102 TREE_USED (label
) = 1;
/* Rewrites *STMT_P from a C++ TRY_BLOCK into a GENERIC TRY_CATCH_EXPR
   built from its TRY_STMTS and TRY_HANDLERS.  */
106 /* Genericize a TRY_BLOCK. */
109 genericize_try_block (tree
*stmt_p
)
111 tree body
= TRY_STMTS (*stmt_p
);
112 tree cleanup
= TRY_HANDLERS (*stmt_p
);
114 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
/* Rewrites *STMT_P from a C++ HANDLER into a GENERIC CATCH_EXPR carrying
   the handler's caught type and body.  */
117 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
120 genericize_catch_block (tree
*stmt_p
)
122 tree type
= HANDLER_TYPE (*stmt_p
)
;
123 tree body
= HANDLER_BODY (*stmt_p
);
125 /* FIXME should the caught type go in TREE_TYPE? */
126 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
/* Builds a TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR: BODY runs
   under the filter; if an exception not in ALLOWED escapes, FAILURE runs.
   NOTE(review): the declaration of T and the final return were dropped by
   the extraction — confirm against upstream.  */
129 /* A terser interface for building a representation of an exception
133 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
137 /* FIXME should the allowed types go in TREE_TYPE? */
138 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
139 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
141 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
142 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
/* Rewrites *STMT_P from an EH_SPEC_BLOCK into the TRY_CATCH_EXPR /
   EH_FILTER_EXPR pair produced by build_gimple_eh_filter_tree; the failure
   path calls std::unexpected (call_unexpected_node) with the exception
   pointer.  TREE_NO_WARNING is set to suppress warnings on the
   compiler-generated calls.  */
147 /* Genericize an EH_SPEC_BLOCK by converting it to a
148 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
151 genericize_eh_spec_block (tree
*stmt_p
)
153 tree body
= EH_SPEC_STMTS (*stmt_p
);
154 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
155 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
157 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
158 TREE_NO_WARNING (*stmt_p
) = true;
159 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
/* Rewrites *STMT_P from an IF_STMT into a COND_EXPR, substituting empty
   statements for missing arms and (per the integer_nonzerop/integer_zerop
   tests) folding away statically-dead arms without side effects.
   NOTE(review): extraction dropped lines here (e.g. the initial
   "stmt = *stmt_p" and the folded-branch assignments) — confirm upstream.  */
162 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
165 genericize_if_stmt (tree
*stmt_p
)
167 tree stmt
, cond
, then_
, else_
;
168 location_t locus
= EXPR_LOCATION (*stmt_p
);
171 cond
= IF_COND (stmt
);
172 then_
= THEN_CLAUSE (stmt
);
173 else_
= ELSE_CLAUSE (stmt
);
176 then_
= build_empty_stmt (locus
);
178 else_
= build_empty_stmt (locus
);
180 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
182 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
185 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
186 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
187 SET_EXPR_LOCATION (stmt
, locus
);
/* Lowers a C/C++ loop to GENERIC gotos and labels: opens break/continue
   scopes, genericizes COND/BODY/INCR via cp_genericize_r, then emits
   top-label / body / continue-label / incr / entry / exit in order and
   closes the scopes.  A statically-false condition emits only a goto to
   the break label.  NOTE(review): extraction dropped lines (declarations
   of blab/clab, several braces, the cond_is_first handling and the final
   store to *stmt_p) — confirm against upstream before editing.  */
191 /* Build a generic representation of one of the C loop forms. COND is the
192 loop condition or NULL_TREE. BODY is the (possibly compound) statement
193 controlled by the loop. INCR is the increment expression of a for-loop,
194 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
195 evaluated before the loop body as in while and for loops, or after the
196 loop body as in do-while loops. */
199 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
200 tree incr
, bool cond_is_first
, int *walk_subtrees
,
204 tree entry
= NULL
, exit
= NULL
, t
;
205 tree stmt_list
= NULL
;
207 blab
= begin_bc_block (bc_break
, start_locus
);
208 clab
= begin_bc_block (bc_continue
, start_locus
);
210 if (incr
&& EXPR_P (incr
))
211 SET_EXPR_LOCATION (incr
, start_locus
);
213 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
214 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
215 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
218 /* If condition is zero don't generate a loop construct. */
219 if (cond
&& integer_zerop (cond
))
223 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
224 get_bc_label (bc_break
));
225 append_to_statement_list (t
, &stmt_list
);
230 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
231 tree top
= build1 (LABEL_EXPR
, void_type_node
,
232 create_artificial_label (start_locus
));
234 /* If we have an exit condition, then we build an IF with gotos either
235 out of the loop, or to the top of it. If there's no exit condition,
236 then we just build a jump back to the top. */
237 exit
= build1 (GOTO_EXPR
, void_type_node
, LABEL_EXPR_LABEL (top
));
239 if (cond
&& !integer_nonzerop (cond
))
241 /* Canonicalize the loop condition to the end. This means
242 generating a branch to the loop condition. Reuse the
243 continue label, if possible. */
248 entry
= build1 (LABEL_EXPR
, void_type_node
,
249 create_artificial_label (start_locus
));
250 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
251 LABEL_EXPR_LABEL (entry
));
254 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
255 get_bc_label (bc_continue
));
256 append_to_statement_list (t
, &stmt_list
);
259 t
= build1 (GOTO_EXPR
, void_type_node
, get_bc_label (bc_break
));
260 exit
= fold_build3_loc (start_locus
,
261 COND_EXPR
, void_type_node
, cond
, exit
, t
);
264 append_to_statement_list (top
, &stmt_list
);
267 append_to_statement_list (body
, &stmt_list
);
268 finish_bc_block (&stmt_list
, bc_continue
, clab
);
269 append_to_statement_list (incr
, &stmt_list
);
270 append_to_statement_list (entry
, &stmt_list
);
271 append_to_statement_list (exit
, &stmt_list
);
272 finish_bc_block (&stmt_list
, bc_break
, blab
);
274 if (stmt_list
== NULL_TREE
)
275 stmt_list
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
/* Lowers a FOR_STMT: genericizes and emits the init statement, then hands
   condition/body/increment to genericize_cp_loop with cond_is_first = 1.
   NOTE(review): extraction dropped lines (declarations of stmt/expr/loop
   and the final store to *stmt_p) — confirm against upstream.  */
280 /* Genericize a FOR_STMT node *STMT_P. */
283 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
288 tree init
= FOR_INIT_STMT (stmt
);
292 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
293 append_to_statement_list (init
, &expr
);
296 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
297 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
298 append_to_statement_list (loop
, &expr
);
/* Lowers a WHILE_STMT via genericize_cp_loop: no increment expression,
   cond_is_first = 1 (condition tested before the body).  */
302 /* Genericize a WHILE_STMT node *STMT_P. */
305 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
308 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
309 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
/* Lowers a DO_STMT via genericize_cp_loop: no increment expression,
   cond_is_first = 0 (condition tested after the body).  */
312 /* Genericize a DO_STMT node *STMT_P. */
315 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
318 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
319 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
/* Lowers a SWITCH_STMT: opens a break scope, genericizes body/cond/type,
   rebuilds the node as a SWITCH_EXPR, then closes the break scope so any
   break inside becomes a goto past the switch.  NOTE(review): the
   "stmt = *stmt_p" initialization and some braces were dropped by the
   extraction.  */
322 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
325 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
328 tree break_block
, body
, cond
, type
;
329 location_t stmt_locus
= EXPR_LOCATION (stmt
);
331 break_block
= begin_bc_block (bc_break
, stmt_locus
);
333 body
= SWITCH_STMT_BODY (stmt
);
335 body
= build_empty_stmt (stmt_locus
);
336 cond
= SWITCH_STMT_COND (stmt
);
337 type
= SWITCH_STMT_TYPE (stmt
);
339 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
340 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
341 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
344 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
345 finish_bc_block (stmt_p
, bc_break
, break_block
);
/* Lowers a CONTINUE_STMT into a not-taken branch prediction hint followed
   by a goto to the innermost continue label.  NOTE(review): the final store
   of stmt_list into *stmt_p was dropped by the extraction.  */
348 /* Genericize a CONTINUE_STMT node *STMT_P. */
351 genericize_continue_stmt (tree
*stmt_p
)
353 tree stmt_list
= NULL
;
354 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
355 tree label
= get_bc_label (bc_continue
);
356 location_t location
= EXPR_LOCATION (*stmt_p
);
357 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
358 append_to_statement_list (pred
, &stmt_list
);
359 append_to_statement_list (jump
, &stmt_list
);
/* Lowers a BREAK_STMT into a goto to the innermost break label.  */
363 /* Genericize a BREAK_STMT node *STMT_P. */
366 genericize_break_stmt (tree
*stmt_p
)
368 tree label
= get_bc_label (bc_break
);
369 location_t location
= EXPR_LOCATION (*stmt_p
);
370 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
/* Lowers the children of an OMP_FOR: opens a continue scope (so 'continue'
   in the body targets the loop latch), genericizes body, clauses, init,
   cond, incr and pre-body, then closes the continue scope into the body.
   NOTE(review): the "stmt = *stmt_p" line appears dropped by extraction.  */
373 /* Genericize a OMP_FOR node *STMT_P. */
376 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
379 location_t locus
= EXPR_LOCATION (stmt
);
380 tree clab
= begin_bc_block (bc_continue
, locus
);
382 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
383 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
384 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
385 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
386 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
387 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
390 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
/* Gimplifies an OMP_FOR into PRE_P, using OMP_FOR_GIMPLIFYING_P as a
   recursion guard so the node is only processed once.  NOTE(review): the
   early-out when already gimplifying and the final return value were
   dropped by the extraction.  */
393 /* Hook into the middle of gimplifying an OMP_FOR node. */
395 static enum gimplify_status
396 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
398 tree for_stmt
= *expr_p
;
399 gimple_seq seq
= NULL
;
401 /* Protect ourselves from recursion. */
402 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
404 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
406 gimplify_and_add (for_stmt
, &seq
);
407 gimple_seq_add_seq (pre_p
, seq
);
409 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
/* Unwraps an EXPR_STMT into its expression, emitting -Wunused-value
   diagnostics for statements with no effect before the statement can be
   nullified by later gimplification; a null statement is replaced by an
   empty statement list.  NOTE(review): the final store back into *stmt_p
   appears dropped by the extraction.  */
414 /* Gimplify an EXPR_STMT node. */
417 gimplify_expr_stmt (tree
*stmt_p
)
419 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
421 if (stmt
== error_mark_node
)
424 /* Gimplification of a statement expression will nullify the
425 statement if all its side effects are moved to *PRE_P and *POST_P.
427 In this case we will not want to emit the gimplified statement.
428 However, we may still want to emit a warning, so we do that before
430 if (stmt
&& warn_unused_value
)
432 if (!TREE_SIDE_EFFECTS (stmt
))
434 if (!IS_EMPTY_STMT (stmt
)
435 && !VOID_TYPE_P (TREE_TYPE (stmt
))
436 && !TREE_NO_WARNING (stmt
))
437 warning (OPT_Wunused_value
, "statement with no effect");
440 warn_if_unused_value (stmt
, input_location
);
443 if (stmt
== NULL_TREE
)
444 stmt
= alloc_stmt_list ();
/* For an INIT_EXPR whose RHS contains an AGGR_INIT_EXPR or VEC_INIT_EXPR
   (possibly wrapped in a TARGET_EXPR and COMPOUND_EXPRs), redirects the
   init's slot to the real target TO and voids the initializer's type so the
   enclosing INIT_EXPR can be dropped.  NOTE(review): the loop structure
   around the COMPOUND_EXPR walk and the replacement of *expr_p were dropped
   by the extraction — confirm against upstream.  */
449 /* Gimplify initialization from an AGGR_INIT_EXPR. */
452 cp_gimplify_init_expr (tree
*expr_p
)
454 tree from
= TREE_OPERAND (*expr_p
, 1);
455 tree to
= TREE_OPERAND (*expr_p
, 0);
458 /* What about code that pulls out the temp and uses it elsewhere? I
459 think that such code never uses the TARGET_EXPR as an initializer. If
460 I'm wrong, we'll abort because the temp won't have any RTL. In that
461 case, I guess we'll need to replace references somehow. */
462 if (TREE_CODE (from
) == TARGET_EXPR
)
463 from
= TARGET_EXPR_INITIAL (from
);
465 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
466 inside the TARGET_EXPR. */
469 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
471 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
472 replace the slot operand with our target.
474 Should we add a target parm to gimplify_expr instead? No, as in this
475 case we want to replace the INIT_EXPR. */
476 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
477 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
479 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
480 AGGR_INIT_EXPR_SLOT (sub
) = to
;
482 VEC_INIT_EXPR_SLOT (sub
) = to
;
485 /* The initialization is now a side-effect, so the container can
488 TREE_TYPE (from
) = void_type_node
;
494 t
= TREE_OPERAND (t
, 1);
/* Lowers a MUST_NOT_THROW_EXPR: gimplifies the body into a GIMPLE_TRY
   whose catch is an eh_must_not_throw calling std::terminate
   (terminate_node), emitted into PRE_P.  NOTE(review): the declarations of
   stmt/mnt, the TEMP handling and the return were dropped by the
   extraction.  */
499 /* Gimplify a MUST_NOT_THROW_EXPR. */
501 static enum gimplify_status
502 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
505 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
506 tree body
= TREE_OPERAND (stmt
, 0);
507 gimple_seq try_
= NULL
;
508 gimple_seq catch_
= NULL
;
511 gimplify_and_add (body
, &try_
);
512 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
513 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
514 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
516 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
/* C++ hook called from the language-independent gimplifier: dispatches on
   TREE_CODE (*expr_p) to expand C++-only nodes (PTRMEM constants,
   AGGR_INIT_EXPR, VEC_INIT_EXPR, THROW_EXPR, MUST_NOT_THROW_EXPR,
   INIT/MODIFY_EXPR empty-class and type-equality fixups, EMPTY_CLASS_EXPR,
   BASELINK, TRY_BLOCK, HANDLER, EH_SPEC_BLOCK, OMP_FOR, EXPR_STMT,
   UNARY_PLUS_EXPR, Cilk spawn forms, CALL_EXPR argument ordering), then
   falls back to c_gimplify_expr.  Saves/restores
   stmts_are_full_exprs_p around statement codes.  NOTE(review): many case
   labels, braces and 'break'/'return ret' lines were dropped by the
   extraction (visible as gaps in the fused line numbers) — do not edit
   without reconciling against the upstream file.  */
527 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
530 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
532 int saved_stmts_are_full_exprs_p
= 0;
533 enum tree_code code
= TREE_CODE (*expr_p
);
534 enum gimplify_status ret
;
536 if (STATEMENT_CODE_P (code
))
538 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
539 current_stmt_tree ()->stmts_are_full_exprs_p
540 = STMT_IS_FULL_EXPR_P (*expr_p
);
546 *expr_p
= cplus_expand_constant (*expr_p
);
551 simplify_aggr_init_expr (expr_p
);
557 location_t loc
= input_location
;
558 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
559 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
560 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
561 input_location
= EXPR_LOCATION (*expr_p
);
562 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
563 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
565 tf_warning_or_error
);
566 cp_genericize_tree (expr_p
);
568 input_location
= loc
;
573 /* FIXME communicate throw type to back end, probably by moving
574 THROW_EXPR into ../tree.def. */
575 *expr_p
= TREE_OPERAND (*expr_p
, 0);
579 case MUST_NOT_THROW_EXPR
:
580 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
583 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
584 LHS of an assignment might also be involved in the RHS, as in bug
587 if (fn_contains_cilk_spawn_p (cfun
)
588 && cilk_detect_spawn_and_unwrap (expr_p
)
590 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
591 cp_gimplify_init_expr (expr_p
);
592 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
594 /* Otherwise fall through. */
597 if (fn_contains_cilk_spawn_p (cfun
)
598 && cilk_detect_spawn_and_unwrap (expr_p
)
600 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
602 /* If the back end isn't clever enough to know that the lhs and rhs
603 types are the same, add an explicit conversion. */
604 tree op0
= TREE_OPERAND (*expr_p
, 0);
605 tree op1
= TREE_OPERAND (*expr_p
, 1);
607 if (!error_operand_p (op0
)
608 && !error_operand_p (op1
)
609 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
610 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
611 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
612 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
613 TREE_TYPE (op0
), op1
);
615 else if ((is_gimple_lvalue (op1
) || INDIRECT_REF_P (op1
)
616 || (TREE_CODE (op1
) == CONSTRUCTOR
617 && CONSTRUCTOR_NELTS (op1
) == 0
618 && !TREE_CLOBBER_P (op1
))
619 || (TREE_CODE (op1
) == CALL_EXPR
620 && !CALL_EXPR_RETURN_SLOT_OPT (op1
)))
621 && is_really_empty_class (TREE_TYPE (op0
)))
623 /* Remove any copies of empty classes. We check that the RHS
624 has a simple form so that TARGET_EXPRs and non-empty
625 CONSTRUCTORs get reduced properly, and we leave the return
626 slot optimization alone because it isn't a copy (FIXME so it
627 shouldn't be represented as one).
629 Also drop volatile variables on the RHS to avoid infinite
630 recursion from gimplify_expr trying to load the value. */
631 if (!TREE_SIDE_EFFECTS (op1
))
633 else if (TREE_THIS_VOLATILE (op1
)
634 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
635 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
636 build_fold_addr_expr (op1
), op0
);
638 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
645 case EMPTY_CLASS_EXPR
:
646 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
647 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
652 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
657 genericize_try_block (expr_p
);
662 genericize_catch_block (expr_p
);
667 genericize_eh_spec_block (expr_p
);
685 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
689 gimplify_expr_stmt (expr_p
);
693 case UNARY_PLUS_EXPR
:
695 tree arg
= TREE_OPERAND (*expr_p
, 0);
696 tree type
= TREE_TYPE (*expr_p
);
697 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
703 case CILK_SPAWN_STMT
:
705 (fn_contains_cilk_spawn_p (cfun
)
706 && cilk_detect_spawn_and_unwrap (expr_p
));
708 /* If errors are seen, then just process it as a CALL_EXPR. */
710 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
713 if (fn_contains_cilk_spawn_p (cfun
)
714 && cilk_detect_spawn_and_unwrap (expr_p
)
716 return (enum gimplify_status
) gimplify_cilk_spawn (expr_p
);
718 /* DR 1030 says that we need to evaluate the elements of an
719 initializer-list in forward order even when it's used as arguments to
720 a constructor. So if the target wants to evaluate them in reverse
721 order and there's more than one argument other than 'this', gimplify
724 if (PUSH_ARGS_REVERSED
&& CALL_EXPR_LIST_INIT_P (*expr_p
)
725 && call_expr_nargs (*expr_p
) > 2)
727 int nargs
= call_expr_nargs (*expr_p
);
728 location_t loc
= EXPR_LOC_OR_LOC (*expr_p
, input_location
);
729 for (int i
= 1; i
< nargs
; ++i
)
731 enum gimplify_status t
732 = gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
);
740 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
744 /* Restore saved state. */
745 if (STATEMENT_CODE_P (code
))
746 current_stmt_tree ()->stmts_are_full_exprs_p
747 = saved_stmts_are_full_exprs_p
;
/* True iff T is a PARM_DECL or RESULT_DECL passed by invisible reference
   (DECL_BY_REFERENCE).  */
753 is_invisiref_parm (const_tree t
)
755 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
756 && DECL_BY_REFERENCE (t
));
/* Hasher equality: two cxx_int_tree_map entries match iff their uids do.  */
759 /* Return true if the uid in both int tree maps are equal. */
762 cxx_int_tree_map_hasher::equal (cxx_int_tree_map
*a
, cxx_int_tree_map
*b
)
764 return (a
->uid
== b
->uid
);
/* Hasher hash function.  NOTE(review): the body (presumably returning
   item->uid) was dropped by the extraction — confirm upstream.  */
767 /* Hash a UID in a cxx_int_tree_map. */
770 cxx_int_tree_map_hasher::hash (cxx_int_tree_map
*item
)
/* Splay-tree comparator ordering DECLs by DECL_UID.  NOTE(review): the
   casts of xa/xb to trees a/b were dropped by the extraction.  */
775 /* A stable comparison routine for use with splay trees and DECLs. */
778 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
783 return DECL_UID (a
) - DECL_UID (b
);
/* Per-region OpenMP sharing context: 'outer' links enclosing regions;
   'variables' maps DECLs (keyed by splay_tree_compare_decl_uid) to their
   default-sharing flag.  NOTE(review): the is_parallel/default_shared
   members used elsewhere in this file were dropped by the extraction.  */
786 /* OpenMP context during genericization. */
788 struct cp_genericize_omp_taskreg
792 struct cp_genericize_omp_taskreg
*outer
;
793 splay_tree variables
;
/* True when DECL's sharing in task regions should be tracked: its
   (array-stripped, invisiref-unwrapped) type must be a non-error class
   type, DECL must not be thread-local, and its sharing must not be
   predetermined.  NOTE(review): the 'return false'/'return true' lines were
   dropped by the extraction.  */
796 /* Return true if genericization should try to determine if
797 DECL is firstprivate or shared within task regions. */
800 omp_var_to_track (tree decl
)
802 tree type
= TREE_TYPE (decl
);
803 if (is_invisiref_parm (decl
))
804 type
= TREE_TYPE (type
);
805 while (TREE_CODE (type
) == ARRAY_TYPE
)
806 type
= TREE_TYPE (type
);
807 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
809 if (VAR_P (decl
) && DECL_THREAD_LOCAL_P (decl
))
811 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
/* Records a use of DECL in region OMP_CTX: if not yet in the region's
   variable map, recurses into outer regions, decides between
   OMP_CLAUSE_DEFAULT_SHARED and OMP_CLAUSE_DEFAULT_FIRSTPRIVATE based on
   outer-region entries / parallel boundaries / locality of DECL, eagerly
   instantiates copy ctor and dtor for implicitly-firstprivate class types,
   and inserts the flag into the map.  NOTE(review): several condition and
   brace lines were dropped by the extraction — reconcile with upstream.  */
816 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
819 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
821 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
822 (splay_tree_key
) decl
);
825 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
827 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
828 if (!omp_ctx
->default_shared
)
830 struct cp_genericize_omp_taskreg
*octx
;
832 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
834 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
835 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
837 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
840 if (octx
->is_parallel
)
844 && (TREE_CODE (decl
) == PARM_DECL
845 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
846 && DECL_CONTEXT (decl
) == current_function_decl
)))
847 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
848 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
850 /* DECL is implicitly determined firstprivate in
851 the current task construct. Ensure copy ctor and
852 dtor are instantiated, because during gimplification
853 it will be already too late. */
854 tree type
= TREE_TYPE (decl
);
855 if (is_invisiref_parm (decl
))
856 type
= TREE_TYPE (type
);
857 while (TREE_CODE (type
) == ARRAY_TYPE
)
858 type
= TREE_TYPE (type
);
859 get_copy_ctor (type
, tf_none
);
860 get_dtor (type
, tf_none
);
863 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
/* Walk state threaded through cp_genericize_r via the 'data' pointer:
   p_set records already-visited trees, bind_expr_stack tracks enclosing
   BIND_EXPRs (for USING_STMT lowering), omp_ctx is the innermost OpenMP
   region or NULL.  */
867 /* Genericization context. */
869 struct cp_genericize_data
871 hash_set
<tree
> *p_set
;
872 vec
<tree
> bind_expr_stack
;
873 struct cp_genericize_omp_taskreg
*omp_ctx
;
/* walk_tree callback doing pre-gimplification lowering of C++ trees:
   notes OpenMP variable uses, dereferences invisible-reference parms,
   remaps block-scope externs, lowers CLEANUP_STMT/IF_STMT/loops/switch/
   break/continue/OMP constructs/USING_STMT/SIZEOF_EXPR, fixes bitfield
   COND_EXPR types, and optionally inserts ubsan null/alignment
   instrumentation.  Visited trees are recorded in wtd->p_set so they are
   walked only once.  NOTE(review): this extraction dropped many original
   lines (the initial "tree stmt = *stmt_p", returns of NULL_TREE,
   *walk_subtrees assignments, braces, several case/break lines) —
   reconcile with the upstream file before editing any logic here.  */
876 /* Perform any pre-gimplification lowering of C++ front end trees to
880 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
883 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
884 hash_set
<tree
> *p_set
= wtd
->p_set
;
886 /* If in an OpenMP context, note var uses. */
887 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
889 || TREE_CODE (stmt
) == PARM_DECL
890 || TREE_CODE (stmt
) == RESULT_DECL
)
891 && omp_var_to_track (stmt
))
892 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
894 if (is_invisiref_parm (stmt
)
895 /* Don't dereference parms in a thunk, pass the references through. */
896 && !(DECL_THUNK_P (current_function_decl
)
897 && TREE_CODE (stmt
) == PARM_DECL
))
899 *stmt_p
= convert_from_reference (stmt
);
904 /* Map block scope extern declarations to visible declarations with the
905 same name and type in outer scopes if any. */
906 if (cp_function_chain
->extern_decl_map
907 && VAR_OR_FUNCTION_DECL_P (stmt
)
908 && DECL_EXTERNAL (stmt
))
910 struct cxx_int_tree_map
*h
, in
;
911 in
.uid
= DECL_UID (stmt
);
912 h
= cp_function_chain
->extern_decl_map
->find_with_hash (&in
, in
.uid
);
921 /* Other than invisiref parms, don't walk the same tree twice. */
922 if (p_set
->contains (stmt
))
928 if (TREE_CODE (stmt
) == ADDR_EXPR
929 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
931 /* If in an OpenMP context, note var uses. */
932 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
933 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
934 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
935 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
938 else if (TREE_CODE (stmt
) == RETURN_EXPR
939 && TREE_OPERAND (stmt
, 0)
940 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
941 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
943 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
944 switch (OMP_CLAUSE_CODE (stmt
))
946 case OMP_CLAUSE_LASTPRIVATE
:
947 /* Don't dereference an invisiref in OpenMP clauses. */
948 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
951 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
952 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
953 cp_genericize_r
, data
, NULL
);
956 case OMP_CLAUSE_PRIVATE
:
957 /* Don't dereference an invisiref in OpenMP clauses. */
958 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
960 else if (wtd
->omp_ctx
!= NULL
)
962 /* Private clause doesn't cause any references to the
963 var in outer contexts, avoid calling
964 omp_cxx_notice_variable for it. */
965 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
967 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
973 case OMP_CLAUSE_SHARED
:
974 case OMP_CLAUSE_FIRSTPRIVATE
:
975 case OMP_CLAUSE_COPYIN
:
976 case OMP_CLAUSE_COPYPRIVATE
:
977 /* Don't dereference an invisiref in OpenMP clauses. */
978 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
981 case OMP_CLAUSE_REDUCTION
:
982 /* Don't dereference an invisiref in reduction clause's
983 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
984 still needs to be genericized. */
985 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
988 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
989 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
990 cp_genericize_r
, data
, NULL
);
991 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
992 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
993 cp_genericize_r
, data
, NULL
);
999 else if (IS_TYPE_OR_DECL_P (stmt
))
1002 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1003 to lower this construct before scanning it, so we need to lower these
1004 before doing anything else. */
1005 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
1006 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1007 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1010 CLEANUP_BODY (stmt
),
1011 CLEANUP_EXPR (stmt
));
1013 else if (TREE_CODE (stmt
) == IF_STMT
)
1015 genericize_if_stmt (stmt_p
);
1016 /* *stmt_p has changed, tail recurse to handle it again. */
1017 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1020 /* COND_EXPR might have incompatible types in branches if one or both
1021 arms are bitfields. Fix it up now. */
1022 else if (TREE_CODE (stmt
) == COND_EXPR
)
1025 = (TREE_OPERAND (stmt
, 1)
1026 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1029 = (TREE_OPERAND (stmt
, 2)
1030 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1033 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1034 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1036 TREE_OPERAND (stmt
, 1)
1037 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1038 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1042 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1043 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1045 TREE_OPERAND (stmt
, 2)
1046 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1047 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1052 else if (TREE_CODE (stmt
) == BIND_EXPR
)
1054 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1057 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1059 && !DECL_EXTERNAL (decl
)
1060 && omp_var_to_track (decl
))
1063 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1064 (splay_tree_key
) decl
);
1066 splay_tree_insert (wtd
->omp_ctx
->variables
,
1067 (splay_tree_key
) decl
,
1069 ? OMP_CLAUSE_DEFAULT_SHARED
1070 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1073 wtd
->bind_expr_stack
.safe_push (stmt
);
1074 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1075 cp_genericize_r
, data
, NULL
);
1076 wtd
->bind_expr_stack
.pop ();
1079 else if (TREE_CODE (stmt
) == USING_STMT
)
1081 tree block
= NULL_TREE
;
1083 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1084 BLOCK, and append an IMPORTED_DECL to its
1085 BLOCK_VARS chained list. */
1086 if (wtd
->bind_expr_stack
.exists ())
1089 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1090 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1095 tree using_directive
;
1096 gcc_assert (TREE_OPERAND (stmt
, 0));
1098 using_directive
= make_node (IMPORTED_DECL
);
1099 TREE_TYPE (using_directive
) = void_type_node
;
1101 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1102 = TREE_OPERAND (stmt
, 0);
1103 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1104 BLOCK_VARS (block
) = using_directive
;
1106 /* The USING_STMT won't appear in GENERIC. */
1107 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1111 else if (TREE_CODE (stmt
) == DECL_EXPR
1112 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1114 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1115 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1118 else if (TREE_CODE (stmt
) == OMP_PARALLEL
|| TREE_CODE (stmt
) == OMP_TASK
)
1120 struct cp_genericize_omp_taskreg omp_ctx
;
1125 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1126 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1127 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1128 omp_ctx
.outer
= wtd
->omp_ctx
;
1129 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1130 wtd
->omp_ctx
= &omp_ctx
;
1131 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1132 switch (OMP_CLAUSE_CODE (c
))
1134 case OMP_CLAUSE_SHARED
:
1135 case OMP_CLAUSE_PRIVATE
:
1136 case OMP_CLAUSE_FIRSTPRIVATE
:
1137 case OMP_CLAUSE_LASTPRIVATE
:
1138 decl
= OMP_CLAUSE_DECL (c
);
1139 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1141 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1144 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1145 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1146 ? OMP_CLAUSE_DEFAULT_SHARED
1147 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1148 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1150 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1152 case OMP_CLAUSE_DEFAULT
:
1153 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1154 omp_ctx
.default_shared
= true;
1158 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1159 wtd
->omp_ctx
= omp_ctx
.outer
;
1160 splay_tree_delete (omp_ctx
.variables
);
1162 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1163 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1164 else if (TREE_CODE (stmt
) == FOR_STMT
)
1165 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1166 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1167 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1168 else if (TREE_CODE (stmt
) == DO_STMT
)
1169 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1170 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1171 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1172 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1173 genericize_continue_stmt (stmt_p
);
1174 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1175 genericize_break_stmt (stmt_p
);
1176 else if (TREE_CODE (stmt
) == OMP_FOR
1177 || TREE_CODE (stmt
) == OMP_SIMD
1178 || TREE_CODE (stmt
) == OMP_DISTRIBUTE
)
1179 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1180 else if (TREE_CODE (stmt
) == SIZEOF_EXPR
)
1182 if (SIZEOF_EXPR_TYPE_P (stmt
))
1184 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt
, 0)),
1185 SIZEOF_EXPR
, false);
1186 else if (TYPE_P (TREE_OPERAND (stmt
, 0)))
1187 *stmt_p
= cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt
, 0),
1188 SIZEOF_EXPR
, false);
1190 *stmt_p
= cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt
, 0),
1191 SIZEOF_EXPR
, false);
1192 if (*stmt_p
== error_mark_node
)
1193 *stmt_p
= size_one_node
;
1196 else if (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1198 if (TREE_CODE (stmt
) == NOP_EXPR
1199 && TREE_CODE (TREE_TYPE (stmt
)) == REFERENCE_TYPE
)
1200 ubsan_maybe_instrument_reference (stmt
);
1201 else if (TREE_CODE (stmt
) == CALL_EXPR
)
1203 tree fn
= CALL_EXPR_FN (stmt
);
1205 && !error_operand_p (fn
)
1206 && POINTER_TYPE_P (TREE_TYPE (fn
))
1207 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
1210 = TREE_CODE (fn
) == ADDR_EXPR
1211 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1212 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
1213 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
1218 p_set
->add (*stmt_p
);
1223 /* Lower C++ front end trees to GENERIC in T_P. */
1226 cp_genericize_tree (tree
* t_p
)
1228 struct cp_genericize_data wtd
;
1230 wtd
.p_set
= new hash_set
<tree
>;
1231 wtd
.bind_expr_stack
.create (0);
1233 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1235 wtd
.bind_expr_stack
.release ();
1238 /* If a function that should end with a return in non-void
1239 function doesn't obviously end with return, add ubsan
1240 instrumentation code to verify it at runtime. */
1243 cp_ubsan_maybe_instrument_return (tree fndecl
)
1245 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
1246 || DECL_CONSTRUCTOR_P (fndecl
)
1247 || DECL_DESTRUCTOR_P (fndecl
)
1248 || !targetm
.warn_func_return (fndecl
))
1251 tree t
= DECL_SAVED_TREE (fndecl
);
1254 switch (TREE_CODE (t
))
1257 t
= BIND_EXPR_BODY (t
);
1259 case TRY_FINALLY_EXPR
:
1260 t
= TREE_OPERAND (t
, 0);
1262 case STATEMENT_LIST
:
1264 tree_stmt_iterator i
= tsi_last (t
);
1281 t
= DECL_SAVED_TREE (fndecl
);
1282 if (TREE_CODE (t
) == BIND_EXPR
1283 && TREE_CODE (BIND_EXPR_BODY (t
)) == STATEMENT_LIST
)
1285 tree_stmt_iterator i
= tsi_last (BIND_EXPR_BODY (t
));
1286 t
= ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl
));
1287 tsi_link_after (&i
, t
, TSI_NEW_STMT
);
1292 cp_genericize (tree fndecl
)
1296 /* Fix up the types of parms passed by invisible reference. */
1297 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
1298 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
1300 /* If a function's arguments are copied to create a thunk,
1301 then DECL_BY_REFERENCE will be set -- but the type of the
1302 argument will be a pointer type, so we will never get
1304 gcc_assert (!DECL_BY_REFERENCE (t
));
1305 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
1306 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
1307 DECL_BY_REFERENCE (t
) = 1;
1308 TREE_ADDRESSABLE (t
) = 0;
1312 /* Do the same for the return value. */
1313 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
1315 t
= DECL_RESULT (fndecl
);
1316 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
1317 DECL_BY_REFERENCE (t
) = 1;
1318 TREE_ADDRESSABLE (t
) = 0;
1322 /* Adjust DECL_VALUE_EXPR of the original var. */
1323 tree outer
= outer_curly_brace_block (current_function_decl
);
1327 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1328 if (DECL_NAME (t
) == DECL_NAME (var
)
1329 && DECL_HAS_VALUE_EXPR_P (var
)
1330 && DECL_VALUE_EXPR (var
) == t
)
1332 tree val
= convert_from_reference (t
);
1333 SET_DECL_VALUE_EXPR (var
, val
);
1339 /* If we're a clone, the body is already GIMPLE. */
1340 if (DECL_CLONED_FUNCTION_P (fndecl
))
1343 /* Expand all the array notations here. */
1345 && contains_array_notation_expr (DECL_SAVED_TREE (fndecl
)))
1346 DECL_SAVED_TREE (fndecl
) =
1347 expand_array_notation_exprs (DECL_SAVED_TREE (fndecl
));
1349 /* We do want to see every occurrence of the parms, so we can't just use
1350 walk_tree's hash functionality. */
1351 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
));
1353 if (flag_sanitize
& SANITIZE_RETURN
1354 && current_function_decl
!= NULL_TREE
1355 && !lookup_attribute ("no_sanitize_undefined",
1356 DECL_ATTRIBUTES (current_function_decl
)))
1357 cp_ubsan_maybe_instrument_return (fndecl
);
1359 /* Do everything else. */
1360 c_genericize (fndecl
);
1362 gcc_assert (bc_label
[bc_break
] == NULL
);
1363 gcc_assert (bc_label
[bc_continue
] == NULL
);
1366 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1367 NULL if there is in fact nothing to do. ARG2 may be null if FN
1368 actually only takes one argument. */
1371 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
1373 tree defparm
, parm
, t
;
1381 nargs
= list_length (DECL_ARGUMENTS (fn
));
1382 argarray
= XALLOCAVEC (tree
, nargs
);
1384 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
1386 defparm
= TREE_CHAIN (defparm
);
1388 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
1390 tree inner_type
= TREE_TYPE (arg1
);
1391 tree start1
, end1
, p1
;
1392 tree start2
= NULL
, p2
= NULL
;
1393 tree ret
= NULL
, lab
;
1399 inner_type
= TREE_TYPE (inner_type
);
1400 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
1401 size_zero_node
, NULL
, NULL
);
1403 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
1404 size_zero_node
, NULL
, NULL
);
1406 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
1407 start1
= build_fold_addr_expr_loc (input_location
, start1
);
1409 start2
= build_fold_addr_expr_loc (input_location
, start2
);
1411 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
1412 end1
= fold_build_pointer_plus (start1
, end1
);
1414 p1
= create_tmp_var (TREE_TYPE (start1
), NULL
);
1415 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
1416 append_to_statement_list (t
, &ret
);
1420 p2
= create_tmp_var (TREE_TYPE (start2
), NULL
);
1421 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
1422 append_to_statement_list (t
, &ret
);
1425 lab
= create_artificial_label (input_location
);
1426 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
1427 append_to_statement_list (t
, &ret
);
1432 /* Handle default arguments. */
1433 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1434 parm
= TREE_CHAIN (parm
), i
++)
1435 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1436 TREE_PURPOSE (parm
), fn
, i
,
1437 tf_warning_or_error
);
1438 t
= build_call_a (fn
, i
, argarray
);
1439 t
= fold_convert (void_type_node
, t
);
1440 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1441 append_to_statement_list (t
, &ret
);
1443 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
1444 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1445 append_to_statement_list (t
, &ret
);
1449 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
1450 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1451 append_to_statement_list (t
, &ret
);
1454 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1455 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1456 append_to_statement_list (t
, &ret
);
1462 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1464 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1465 /* Handle default arguments. */
1466 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1467 parm
= TREE_CHAIN (parm
), i
++)
1468 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1469 TREE_PURPOSE (parm
),
1470 fn
, i
, tf_warning_or_error
);
1471 t
= build_call_a (fn
, i
, argarray
);
1472 t
= fold_convert (void_type_node
, t
);
1473 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1477 /* Return code to initialize DECL with its default constructor, or
1478 NULL if there's nothing to do. */
1481 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
1483 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1487 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1492 /* Return code to initialize DST with a copy constructor from SRC. */
1495 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1497 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1501 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1503 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1508 /* Similarly, except use an assignment operator instead. */
1511 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1513 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1517 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1519 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1524 /* Return code to destroy DECL. */
1527 cxx_omp_clause_dtor (tree clause
, tree decl
)
1529 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1533 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1538 /* True if OpenMP should privatize what this DECL points to rather
1539 than the DECL itself. */
1542 cxx_omp_privatize_by_reference (const_tree decl
)
1544 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1545 || is_invisiref_parm (decl
));
1548 /* Return true if DECL is const qualified var having no mutable member. */
1550 cxx_omp_const_qual_no_mutable (tree decl
)
1552 tree type
= TREE_TYPE (decl
);
1553 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1555 if (!is_invisiref_parm (decl
))
1557 type
= TREE_TYPE (type
);
1559 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1561 /* NVR doesn't preserve const qualification of the
1563 tree outer
= outer_curly_brace_block (current_function_decl
);
1567 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
1568 if (DECL_NAME (decl
) == DECL_NAME (var
)
1569 && (TYPE_MAIN_VARIANT (type
)
1570 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1572 if (TYPE_READONLY (TREE_TYPE (var
)))
1573 type
= TREE_TYPE (var
);
1579 if (type
== error_mark_node
)
1582 /* Variables with const-qualified type having no mutable member
1583 are predetermined shared. */
1584 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1590 /* True if OpenMP sharing attribute of DECL is predetermined. */
1592 enum omp_clause_default_kind
1593 cxx_omp_predetermined_sharing (tree decl
)
1595 /* Static data members are predetermined shared. */
1596 if (TREE_STATIC (decl
))
1598 tree ctx
= CP_DECL_CONTEXT (decl
);
1599 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1600 return OMP_CLAUSE_DEFAULT_SHARED
;
1603 /* Const qualified vars having no mutable member are predetermined
1605 if (cxx_omp_const_qual_no_mutable (decl
))
1606 return OMP_CLAUSE_DEFAULT_SHARED
;
1608 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1611 /* Finalize an implicitly determined clause. */
1614 cxx_omp_finish_clause (tree c
, gimple_seq
*)
1616 tree decl
, inner_type
;
1617 bool make_shared
= false;
1619 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1622 decl
= OMP_CLAUSE_DECL (c
);
1623 decl
= require_complete_type (decl
);
1624 inner_type
= TREE_TYPE (decl
);
1625 if (decl
== error_mark_node
)
1627 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1629 if (is_invisiref_parm (decl
))
1630 inner_type
= TREE_TYPE (inner_type
);
1633 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1639 /* We're interested in the base element, not arrays. */
1640 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1641 inner_type
= TREE_TYPE (inner_type
);
1643 /* Check for special function availability by building a call to one.
1644 Save the results, because later we won't be in the right context
1645 for making these queries. */
1647 && CLASS_TYPE_P (inner_type
)
1648 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
1652 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;